From 34146564ba17423da8000e983722094f2426367e Mon Sep 17 00:00:00 2001 From: agroh1 <42010767+agroh1@users.noreply.github.com> Date: Mon, 25 Apr 2022 21:25:48 -0400 Subject: [PATCH 01/39] Source Amazon Ads: added `adId` to product report stream (#11660) * added adId to product report stream * simplified removal of adId * removed duplicate comment * Bumped dockerfile version * bump connector version Co-authored-by: marcosmarxm --- .../init/src/main/resources/seed/source_definitions.yaml | 2 +- .../init/src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-amazon-ads/Dockerfile | 2 +- .../streams/report_streams/products_report.py | 8 ++++++++ docs/integrations/sources/amazon-ads.md | 1 + 5 files changed, 12 insertions(+), 3 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index adc32254614f..2c68ec2b22e4 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -15,7 +15,7 @@ - name: Amazon Ads sourceDefinitionId: c6b0a29e-1da9-4512-9002-7bfd0cba2246 dockerRepository: airbyte/source-amazon-ads - dockerImageTag: 0.1.5 + dockerImageTag: 0.1.6 documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-ads icon: amazonads.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index e0a897c7fef8..4d7efe975a2a 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -87,7 +87,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amazon-ads:0.1.5" +- dockerImage: "airbyte/source-amazon-ads:0.1.6" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-ads" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile index 0fde72acea2f..bb4f4e6cb7b2 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.5 +LABEL io.airbyte.version=0.1.6 LABEL io.airbyte.name=airbyte/source-amazon-ads diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py index 298871406dfd..136d3d14ce82 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py @@ -119,6 +119,7 @@ "campaignId", "adGroupName", "adGroupId", + "adId", "impressions", "clicks", "cost", @@ -156,6 +157,7 @@ "adGroupId", "keywordId", "keywordText", + "adId", "asin", "otherAsin", "sku", @@ -179,6 +181,7 @@ "campaignId", "adGroupName", "adGroupId", + "adId", "asin", "otherAsin", "sku", @@ -262,4 +265,9 @@ def _get_init_report_body(self, report_date: str, record_type: str, profile): if profile.accountInfo.type == "vendor": metrics_list = copy(metrics_list) metrics_list.remove("sku") + +# adId is automatically added to the report by Amazon, and requesting adId causes an Amazon error + if "adId" in metrics_list: + metrics_list.remove("adId") + return {**body, "metrics": ",".join(metrics_list)} diff --git a/docs/integrations/sources/amazon-ads.md b/docs/integrations/sources/amazon-ads.md index 2f9e617dac3c..766b6946fa05 100644 --- a/docs/integrations/sources/amazon-ads.md +++ b/docs/integrations/sources/amazon-ads.md @@ -76,6 +76,7 @@ Start date used for generating reports starting from the specified start date. 
S | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| `0.1.6` | 2022-04-20 | [\#11659](https://github.com/airbytehq/airbyte/pull/11659) | Add adId to products report | | `0.1.5` | 2022-04-08 | [\#11430](https://github.com/airbytehq/airbyte/pull/11430) | `Added support OAuth2.0` | | `0.1.4` | 2022-02-21 | [\#10513](https://github.com/airbytehq/airbyte/pull/10513) | `Increasing REPORT_WAIT_TIMEOUT for supporting report generation which takes longer time ` | | `0.1.3` | 2021-12-28 | [\#8388](https://github.com/airbytehq/airbyte/pull/8388) | `Add retry if recoverable error occured for reporting stream processing` | From bd8bff90d519cda025d2d9ed664e46f4fc4c16f9 Mon Sep 17 00:00:00 2001 From: Tim Roes Date: Tue, 26 Apr 2022 14:48:50 +0200 Subject: [PATCH 02/39] Resolve cyclic dependency between analytics files (#12339) * Resolve cyclic dependency between analytics files * Refactor tracking component to hook --- airbyte-webapp/src/hooks/services/Analytics/index.tsx | 1 - .../{TrackPageAnalytics.tsx => useTrackPageAnalytics.tsx} | 6 ++---- airbyte-webapp/src/packages/cloud/cloudRoutes.tsx | 4 ++-- airbyte-webapp/src/pages/routes.tsx | 5 +++-- 4 files changed, 7 insertions(+), 9 deletions(-) rename airbyte-webapp/src/hooks/services/Analytics/{TrackPageAnalytics.tsx => useTrackPageAnalytics.tsx} (81%) diff --git a/airbyte-webapp/src/hooks/services/Analytics/index.tsx b/airbyte-webapp/src/hooks/services/Analytics/index.tsx index 22f927a30537..003a962f4d8b 100644 --- a/airbyte-webapp/src/hooks/services/Analytics/index.tsx +++ b/airbyte-webapp/src/hooks/services/Analytics/index.tsx @@ -1,2 +1 @@ -export * from "./TrackPageAnalytics"; export * from "./useAnalyticsService"; diff --git a/airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx b/airbyte-webapp/src/hooks/services/Analytics/useTrackPageAnalytics.tsx similarity index 81% rename from airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx rename to airbyte-webapp/src/hooks/services/Analytics/useTrackPageAnalytics.tsx index 3113f45c8975..434ad12ef243 100644 --- a/airbyte-webapp/src/hooks/services/Analytics/TrackPageAnalytics.tsx +++ b/airbyte-webapp/src/hooks/services/Analytics/useTrackPageAnalytics.tsx @@ -1,11 +1,11 @@ -import React, { useEffect } from "react"; +import { useEffect } from "react"; import useRouter from "hooks/useRouter"; import { getPageName } from "./pageNameUtils"; import { useAnalyticsService } from "./useAnalyticsService"; -export const TrackPageAnalytics: React.FC = () => { +export const useTrackPageAnalytics = () => { const { pathname } = useRouter(); const analyticsService = useAnalyticsService(); useEffect(() => { @@ -15,6 +15,4 @@ export const TrackPageAnalytics: React.FC = () => { analyticsService.page(pageName); } }, [analyticsService, pathname]); - - return null; }; diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx index 7d02c94196cc..ec3c6ba1a488 100644 --- a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx +++ b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx @@ -4,8 +4,8 @@ import { useEffectOnce } from "react-use"; import LoadingPage from "components/LoadingPage"; -import { TrackPageAnalytics } from "hooks/services/Analytics/TrackPageAnalytics"; import { useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "hooks/services/Analytics/useAnalyticsService"; +import { useTrackPageAnalytics } from "hooks/services/Analytics/useTrackPageAnalytics"; import { FeatureItem, useFeatureRegisterValues } 
from "hooks/services/Feature"; import { useApiHealthPoll } from "hooks/services/Health"; import { OnboardingServiceProvider } from "hooks/services/Onboarding"; @@ -154,6 +154,7 @@ export const Routing: React.FC = () => { ); useAnalyticsRegisterValues(analyticsContext); useAnalyticsIdentifyUser(user?.userId); + useTrackPageAnalytics(); if (!inited) { return ; @@ -161,7 +162,6 @@ export const Routing: React.FC = () => { return ( - }> {/* Allow email verification no matter whether the user is logged in or not */} diff --git a/airbyte-webapp/src/pages/routes.tsx b/airbyte-webapp/src/pages/routes.tsx index a8bae59767a4..b8e7c1f6dd33 100644 --- a/airbyte-webapp/src/pages/routes.tsx +++ b/airbyte-webapp/src/pages/routes.tsx @@ -5,7 +5,8 @@ import { useEffectOnce } from "react-use"; import { useConfig } from "config"; import { Workspace } from "core/domain/workspace/Workspace"; -import { TrackPageAnalytics, useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "hooks/services/Analytics"; +import { useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "hooks/services/Analytics"; +import { useTrackPageAnalytics } from "hooks/services/Analytics/useTrackPageAnalytics"; import { useApiHealthPoll } from "hooks/services/Health"; import { useNotificationService } from "hooks/services/Notification"; import { OnboardingServiceProvider } from "hooks/services/Onboarding"; @@ -55,7 +56,6 @@ const useAddAnalyticsContextForWorkspace = (workspace: Workspace): void => { const MainViewRoutes: React.FC<{ workspace: Workspace }> = ({ workspace }) => { return ( - } /> } /> @@ -96,6 +96,7 @@ export const AutoSelectFirstWorkspace: React.FC<{ includePath?: boolean }> = ({ const RoutingWithWorkspace: React.FC = () => { const workspace = useCurrentWorkspace(); useAddAnalyticsContextForWorkspace(workspace); + useTrackPageAnalytics(); useApiHealthPoll(); useDemo(); From 0c12ad9136d992cde35e3975ad6056669188554c Mon Sep 17 00:00:00 2001 From: "Pedro S. 
Lopez" Date: Tue, 26 Apr 2022 10:16:04 -0400 Subject: [PATCH 03/39] python generators output `spec.yaml` files (#12245) * generators output spec.yaml files * source-singer generator also uses spec.yaml * update scaffold * update python cdk tutorials to use spec.yaml * add docs updates * consistency --- .../source-python-http-api/README.md.hbs | 2 +- .../acceptance-test-config.yml.hbs | 2 +- .../secrets/config.json.hbs | 2 +- .../source-python-http-api/setup.py.hbs | 2 +- .../source_{{snakeCase name}}/source.py.hbs | 4 +- .../source_{{snakeCase name}}/spec.json.hbs | 16 ------ .../source_{{snakeCase name}}/spec.yaml.hbs | 13 +++++ .../source-python/README.md.hbs | 2 +- .../acceptance-test-config.yml.hbs | 2 +- .../source-python/secrets/config.json.hbs | 2 +- .../source-python/setup.py.hbs | 2 +- .../source_{{snakeCase name}}/source.py.hbs | 6 +- .../source_{{snakeCase name}}/spec.json.hbs | 16 ------ .../source_{{snakeCase name}}/spec.yaml.hbs | 12 ++++ .../source-singer/README.md.hbs | 2 +- .../acceptance-test-config.yml.hbs | 2 +- .../source-singer/secrets/config.json.hbs | 2 +- .../source-singer/setup.py.hbs | 2 +- .../source.py.hbs | 2 +- .../spec.json.hbs | 16 ------ .../spec.yaml.hbs | 13 +++++ .../source-scaffold-source-http/README.md | 2 +- .../acceptance-test-config.yml | 2 +- .../source-scaffold-source-http/setup.py | 2 +- .../source_scaffold_source_http/source.py | 4 +- .../source_scaffold_source_http/spec.json | 16 ------ .../source_scaffold_source_http/spec.yaml | 13 +++++ .../source-scaffold-source-python/README.md | 2 +- .../acceptance-test-config.yml | 2 +- .../source-scaffold-source-python/setup.py | 2 +- .../source_scaffold_source_python/source.py | 6 +- .../source_scaffold_source_python/spec.json | 16 ------ .../source_scaffold_source_python/spec.yaml | 12 ++++ .../cdk-python/basic-concepts.md | 10 ++-- .../source-acceptance-tests-reference.md | 10 ++-- .../tutorials/building-a-python-source.md | 8 +-- .../tutorials/cdk-speedrun.md | 41 +++++++------ .../3-define-inputs.md | 57 +++++++++---------- .../4-connection-checking.md | 4 +- 39 files changed, 155 insertions(+), 176 deletions(-) delete mode 100644 airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.json.hbs create mode 100644 airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.yaml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.json.hbs create mode 100644 airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs delete mode 100644 airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.json.hbs create mode 100644 airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs delete mode 100644 airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.json create mode 100644 airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.yaml delete mode 100644 airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.json create mode 100644 airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.yaml diff --git a/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs b/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs index 16e98927f20b..521db4270d52 100644 --- 
a/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-python-http-api/README.md.hbs @@ -40,7 +40,7 @@ To build using Gradle, from the Airbyte repository root, run: #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.json` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. diff --git a/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs index 8f5fc50a3ec0..49acab015f3f 100644 --- a/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs +++ b/airbyte-integrations/connector-templates/source-python-http-api/acceptance-test-config.yml.hbs @@ -3,7 +3,7 @@ connector_image: airbyte/source-{{dashCase name}}:dev tests: spec: - - spec_path: "source_{{snakeCase name}}/spec.json" + - spec_path: "source_{{snakeCase name}}/spec.yaml" connection: - config_path: "secrets/config.json" status: "succeed" diff --git a/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs index 0662bdcb0997..f5f8933895aa 100644 --- a/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs +++ b/airbyte-integrations/connector-templates/source-python-http-api/secrets/config.json.hbs @@ -1,3 +1,3 @@ { - "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.json" + "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. 
The schema of this file should match what is in your spec.yaml" } diff --git a/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs index 33db636d5de6..318232b5d589 100644 --- a/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs +++ b/airbyte-integrations/connector-templates/source-python-http-api/setup.py.hbs @@ -22,7 +22,7 @@ setup( author_email="contact@airbyte.io", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, extras_require={ "tests": TEST_REQUIREMENTS, }, diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/source.py.hbs index 0dcf43a8bb26..8ea49ae1e7f4 100644 --- a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/source.py.hbs +++ b/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/source.py.hbs @@ -23,7 +23,7 @@ stream from a source. This pattern is the same one used by Airbyte internally to The approach here is not authoritative, and devs are free to use their own judgement. -There are additional required TODOs in the files within the integration_tests folder and the spec.json file. +There are additional required TODOs in the files within the integration_tests folder and the spec.yaml file. """ @@ -189,7 +189,7 @@ class Source{{properCase name}}(AbstractSource): See https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L232 for an example. - :param config: the user-input config object conforming to the connector's spec.json + :param config: the user-input config object conforming to the connector's spec.yaml :param logger: logger object :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. """ diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.json.hbs b/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.json.hbs deleted file mode 100644 index 6b8f5a28ab3b..000000000000 --- a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.json.hbs +++ /dev/null @@ -1,16 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "{{capitalCase name}} Spec", - "type": "object", - "required": ["TODO"], - "additionalProperties": false, - "properties": { - "TODO: This schema defines the configuration required for the source. 
This usually involves metadata such as database and/or authentication information.": { - "type": "string", - "description": "describe me" - } - } - } -} diff --git a/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.yaml.hbs new file mode 100644 index 000000000000..0c615f16c6ae --- /dev/null +++ b/airbyte-integrations/connector-templates/source-python-http-api/source_{{snakeCase name}}/spec.yaml.hbs @@ -0,0 +1,13 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: {{capitalCase name}} Spec + type: object + required: + - TODO + additionalProperties: false + properties: + # 'TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.': + TODO: + type: string + description: describe me diff --git a/airbyte-integrations/connector-templates/source-python/README.md.hbs b/airbyte-integrations/connector-templates/source-python/README.md.hbs index 245e61992e59..421141cf2c53 100644 --- a/airbyte-integrations/connector-templates/source-python/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-python/README.md.hbs @@ -37,7 +37,7 @@ From the Airbyte repository root, run: #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.json` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}/spec.yaml` file. Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. diff --git a/airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs index 2fe73d94d619..2ec2ab2694f8 100644 --- a/airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs +++ b/airbyte-integrations/connector-templates/source-python/acceptance-test-config.yml.hbs @@ -3,7 +3,7 @@ connector_image: airbyte/source-{{dashCase name}}:dev tests: spec: - - spec_path: "source_{{snakeCase name}}/spec.json" + - spec_path: "source_{{snakeCase name}}/spec.yaml" connection: - config_path: "secrets/config.json" status: "succeed" diff --git a/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs index dbfd336bdd1c..b494c8d9344b 100644 --- a/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs +++ b/airbyte-integrations/connector-templates/source-python/secrets/config.json.hbs @@ -1,3 +1,3 @@ { - "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.json" + "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. 
The schema of this file should match what is in your spec.yaml" } diff --git a/airbyte-integrations/connector-templates/source-python/setup.py.hbs b/airbyte-integrations/connector-templates/source-python/setup.py.hbs index 8336ec0f2ee9..62f57dd1cd1e 100644 --- a/airbyte-integrations/connector-templates/source-python/setup.py.hbs +++ b/airbyte-integrations/connector-templates/source-python/setup.py.hbs @@ -21,7 +21,7 @@ setup( author_email="contact@airbyte.io", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, + package_data={"": ["*.json", "*.yaml"]}, extras_require={ "tests": TEST_REQUIREMENTS, }, diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs index 28a08715ec03..b19f41953047 100644 --- a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs +++ b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/source.py.hbs @@ -30,7 +30,7 @@ class Source{{properCase name}}(Source): :param logger: Logging object to display debug/info/error to the logs (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :return: AirbyteConnectionStatus indicating a Success or Failure """ @@ -50,7 +50,7 @@ class Source{{properCase name}}(Source): :param logger: Logging object to display debug/info/error to the logs (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :return: AirbyteCatalog is an object describing a list of all available streams in this source. A stream is an AirbyteStream object that includes: @@ -82,7 +82,7 @@ class Source{{properCase name}}(Source): :param logger: Logging object to display debug/info/error to the logs (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :param catalog: The input catalog is a ConfiguredAirbyteCatalog which is almost the same as AirbyteCatalog returned by discover(), but in addition, it's been configured in the UI! 
For each particular stream and field, there may have been provided diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.json.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.json.hbs deleted file mode 100644 index 5be21867a124..000000000000 --- a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.json.hbs +++ /dev/null @@ -1,16 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "{{capitalCase name}} Spec", - "type": "object", - "required": ["fix-me"], - "additionalProperties": false, - "properties": { - "fix-me": { - "type": "string", - "description": "describe me" - } - } - } -} diff --git a/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs new file mode 100644 index 000000000000..924aa67e577d --- /dev/null +++ b/airbyte-integrations/connector-templates/source-python/source_{{snakeCase name}}/spec.yaml.hbs @@ -0,0 +1,12 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: {{capitalCase name}} Spec + type: object + required: + - fix-me + additionalProperties: false + properties: + fix-me: + type: string + description: describe me diff --git a/airbyte-integrations/connector-templates/source-singer/README.md.hbs b/airbyte-integrations/connector-templates/source-singer/README.md.hbs index 5f41d0b567a1..775509d192dd 100644 --- a/airbyte-integrations/connector-templates/source-singer/README.md.hbs +++ b/airbyte-integrations/connector-templates/source-singer/README.md.hbs @@ -37,7 +37,7 @@ From the Airbyte repository root, run: #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/{{dashCase name}}) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}_singer/spec.json` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_{{snakeCase name}}_singer/spec.yaml` file. Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. 
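The template READMEs in this patch repeatedly point contributors at a `secrets/config.json` that must conform to the connector's `spec.yaml`. Since `connectionSpecification` is itself a JSON Schema document, a config can be checked against it locally. The following is a minimal sketch, assuming PyYAML and `jsonschema` are installed; `source_example/` is a placeholder for the generated `source_` package directory:

```python
# Minimal sketch: validate secrets/config.json against the connectionSpecification
# declared in spec.yaml. Assumes PyYAML and jsonschema are available; the
# source_example/ path is a placeholder for the generated package directory.
import json

import jsonschema
import yaml

with open("source_example/spec.yaml") as f:
    spec = yaml.safe_load(f)

with open("secrets/config.json") as f:
    config = json.load(f)

# connectionSpecification is a JSON Schema object, so it can be passed directly
# to jsonschema.validate; a ValidationError pinpoints the offending field.
jsonschema.validate(instance=config, schema=spec["connectionSpecification"])
print("secrets/config.json conforms to spec.yaml")
```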
diff --git a/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs b/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs index 74ed41b646fc..f485a8c6460d 100644 --- a/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs +++ b/airbyte-integrations/connector-templates/source-singer/acceptance-test-config.yml.hbs @@ -3,7 +3,7 @@ connector_image: airbyte/source-{{dashCase name}}-singer:dev tests: spec: - - spec_path: "source_{{snakeCase name}}_singer/spec.json" + - spec_path: "source_{{snakeCase name}}_singer/spec.yaml" connection: - config_path: "secrets/config.json" status: "succeed" diff --git a/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs b/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs index 0662bdcb0997..f5f8933895aa 100644 --- a/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs +++ b/airbyte-integrations/connector-templates/source-singer/secrets/config.json.hbs @@ -1,3 +1,3 @@ { - "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.json" + "fix-me": "TODO populate with needed configuration for integration tests or delete this file and any references to it. The schema of this file should match what is in your spec.yaml" } diff --git a/airbyte-integrations/connector-templates/source-singer/setup.py.hbs b/airbyte-integrations/connector-templates/source-singer/setup.py.hbs index a621d517c4aa..a3f2d73a1ec3 100644 --- a/airbyte-integrations/connector-templates/source-singer/setup.py.hbs +++ b/airbyte-integrations/connector-templates/source-singer/setup.py.hbs @@ -22,7 +22,7 @@ setup( author_email="contact@airbyte.io", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, + package_data={"": ["*.json", "*.yaml"]}, extras_require={ "tests": TEST_REQUIREMENTS, }, diff --git a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs index 19955ac86c4a..0b5f7e8fa593 100644 --- a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs +++ b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/source.py.hbs @@ -22,7 +22,7 @@ class Source{{properCase name}}Singer(SingerSource): (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config_path: Path to the file containing the configuration json config :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :return: AirbyteConnectionStatus indicating a Success or Failure """ diff --git a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.json.hbs b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.json.hbs deleted file mode 100644 index cb0cbb12a177..000000000000 --- a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.json.hbs +++ /dev/null @@ -1,16 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.io/integrations/sources/{{snakeCase name}}", - "connectionSpecification": { - 
"$schema": "http://json-schema.org/draft-07/schema#", - "title": "Source {{capitalCase name}} Singer Spec", - "type": "object", - "required": ["TODO"], - "additionalProperties": true, - "properties": { - "TODO -- add all the properties required to configure this tap e.g: username, password, api token, etc.": { - "type": "string", - "description": "describe me" - } - } - } -} diff --git a/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs new file mode 100644 index 000000000000..7e72c4734621 --- /dev/null +++ b/airbyte-integrations/connector-templates/source-singer/source_{{snakeCase name}}_singer/spec.yaml.hbs @@ -0,0 +1,13 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/{{snakeCase name}} +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Source {{capitalCase name}} Singer Spec + type: object + required: + - TODO + additionalProperties: true + properties: + # TODO -- add all the properties required to configure this tap e.g: username, password, api token, etc. + TODO: + type: string + description: describe me diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/README.md b/airbyte-integrations/connectors/source-scaffold-source-http/README.md index f238246865e1..8dd88c473632 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/README.md +++ b/airbyte-integrations/connectors/source-scaffold-source-http/README.md @@ -40,7 +40,7 @@ To build using Gradle, from the Airbyte repository root, run: #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/scaffold-source-http) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_scaffold_source_http/spec.json` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_scaffold_source_http/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. 
diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml index bfeffee388b8..a625390b4d5e 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-config.yml @@ -3,7 +3,7 @@ connector_image: airbyte/source-scaffold-source-http:dev tests: spec: - - spec_path: "source_scaffold_source_http/spec.json" + - spec_path: "source_scaffold_source_http/spec.yaml" connection: - config_path: "secrets/config.json" status: "succeed" diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/setup.py b/airbyte-integrations/connectors/source-scaffold-source-http/setup.py index 2f5242793720..bc75fcc2e372 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/setup.py +++ b/airbyte-integrations/connectors/source-scaffold-source-http/setup.py @@ -22,7 +22,7 @@ author_email="contact@airbyte.io", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, extras_require={ "tests": TEST_REQUIREMENTS, }, diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/source.py b/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/source.py index 7bb4d5e8765f..7bc1036f4b4f 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/source.py +++ b/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/source.py @@ -23,7 +23,7 @@ The approach here is not authoritative, and devs are free to use their own judgement. -There are additional required TODOs in the files within the integration_tests folder and the spec.json file. +There are additional required TODOs in the files within the integration_tests folder and the spec.yaml file. """ @@ -189,7 +189,7 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: See https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L232 for an example. - :param config: the user-input config object conforming to the connector's spec.json + :param config: the user-input config object conforming to the connector's spec.yaml :param logger: logger object :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. """ diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.json b/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.json deleted file mode 100644 index 63ad05756e57..000000000000 --- a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Scaffold Source Http Spec", - "type": "object", - "required": ["TODO"], - "additionalProperties": false, - "properties": { - "TODO: This schema defines the configuration required for the source. 
This usually involves metadata such as database and/or authentication information.": { - "type": "string", - "description": "describe me" - } - } - } -} diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.yaml b/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.yaml new file mode 100644 index 000000000000..f1c28b63d292 --- /dev/null +++ b/airbyte-integrations/connectors/source-scaffold-source-http/source_scaffold_source_http/spec.yaml @@ -0,0 +1,13 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Scaffold Source Http Spec + type: object + required: + - TODO + additionalProperties: false + properties: + # 'TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.': + TODO: + type: string + description: describe me diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/README.md b/airbyte-integrations/connectors/source-scaffold-source-python/README.md index 1f62dd697ac7..4aca35f0788e 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/README.md +++ b/airbyte-integrations/connectors/source-scaffold-source-python/README.md @@ -37,7 +37,7 @@ From the Airbyte repository root, run: #### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/scaffold-source-python) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_scaffold_source_python/spec.json` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_scaffold_source_python/spec.yaml` file. Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. See `integration_tests/sample_config.json` for a sample config file. 
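The docs updated later in this patch describe the runtime side of this change: the CDK's `Source` looks for a `spec.yaml` in the module root while still accepting a legacy `spec.json`. The sketch below illustrates that lookup order only; it is not the CDK's actual implementation, and `load_connector_spec` is a made-up name:

```python
# Illustrative only: yaml-first spec resolution with a JSON fallback, mirroring
# the behavior described in basic-concepts.md below. Not the CDK's real code.
import json
from pathlib import Path

import yaml


def load_connector_spec(package_dir: str) -> dict:
    """Return the parsed connector spec, preferring spec.yaml over spec.json."""
    yaml_path = Path(package_dir) / "spec.yaml"
    json_path = Path(package_dir) / "spec.json"
    if yaml_path.exists():
        return yaml.safe_load(yaml_path.read_text())
    if json_path.exists():
        return json.loads(json_path.read_text())
    raise FileNotFoundError(f"no spec.yaml or spec.json in {package_dir}")
```

This is also why the `setup.py` diffs add `"*.yaml"` to `package_data`: without that glob, the new spec file would not ship inside the built connector package.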
diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml b/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml index 0535967aefa4..9e655296a5fa 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-config.yml @@ -3,7 +3,7 @@ connector_image: airbyte/source-scaffold-source-python:dev tests: spec: - - spec_path: "source_scaffold_source_python/spec.json" + - spec_path: "source_scaffold_source_python/spec.yaml" connection: - config_path: "secrets/config.json" status: "succeed" diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/setup.py b/airbyte-integrations/connectors/source-scaffold-source-python/setup.py index a648354f8b45..6640fcc539c7 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/setup.py +++ b/airbyte-integrations/connectors/source-scaffold-source-python/setup.py @@ -21,7 +21,7 @@ author_email="contact@airbyte.io", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, + package_data={"": ["*.json", "*.yaml"]}, extras_require={ "tests": TEST_REQUIREMENTS, }, diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/source.py b/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/source.py index a4f065065578..dd62c14843c6 100644 --- a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/source.py +++ b/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/source.py @@ -30,7 +30,7 @@ def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus: :param logger: Logging object to display debug/info/error to the logs (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :return: AirbyteConnectionStatus indicating a Success or Failure """ @@ -50,7 +50,7 @@ def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: :param logger: Logging object to display debug/info/error to the logs (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :return: AirbyteCatalog is an object describing a list of all available streams in this source. A stream is an AirbyteStream object that includes: @@ -82,7 +82,7 @@ def read( :param logger: Logging object to display debug/info/error to the logs (logs will not be accessible via airbyte UI if they are not passed to this logger) :param config: Json object containing the configuration of this source, content of this json is as specified in - the properties of the spec.json file + the properties of the spec.yaml file :param catalog: The input catalog is a ConfiguredAirbyteCatalog which is almost the same as AirbyteCatalog returned by discover(), but in addition, it's been configured in the UI! 
For each particular stream and field, there may have been provided diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.json b/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.json deleted file mode 100644 index b4f62b06a8e8..000000000000 --- a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Scaffold Source Python Spec", - "type": "object", - "required": ["fix-me"], - "additionalProperties": false, - "properties": { - "fix-me": { - "type": "string", - "description": "describe me" - } - } - } -} diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.yaml b/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.yaml new file mode 100644 index 000000000000..a9b889fe7328 --- /dev/null +++ b/airbyte-integrations/connectors/source-scaffold-source-python/source_scaffold_source_python/spec.yaml @@ -0,0 +1,12 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Scaffold Source Python Spec + type: object + required: + - fix-me + additionalProperties: false + properties: + fix-me: + type: string + description: describe me diff --git a/docs/connector-development/cdk-python/basic-concepts.md b/docs/connector-development/cdk-python/basic-concepts.md index f8c65f1c395e..e446ccb42c2d 100644 --- a/docs/connector-development/cdk-python/basic-concepts.md +++ b/docs/connector-development/cdk-python/basic-concepts.md @@ -30,17 +30,15 @@ The Source contains one or more **Streams** \(or **Airbyte Streams**\). A **Stre Airbyte provides abstract base classes which make it much easier to perform certain categories of tasks e.g: `HttpStream` makes it easy to create HTTP API-based streams. However, if those do not satisfy your use case \(for example, if you're pulling data from a relational database\), you can always directly implement the Airbyte Protocol by subclassing the CDK's `Source` class. +The `Source` class implements the `Spec` operation by looking for a file named `spec.yaml` (or `spec.json`) in the module's root by default. This is expected to be a json schema file that specifies the required configuration. Here is an [example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-exchange-rates/source_exchange_rates/spec.yaml) from the Exchange Rates source. + Note that while this is the most flexible way to implement a source connector, it is also the most toilsome as you will be required to manually manage state, input validation, correctly conforming to the Airbyte Protocol message formats, and more. We recommend using a subclass of `Source` unless you cannot fulfill your use case otherwise. ## The `AbstractSource` Object `AbstractSource` is a more opinionated implementation of `Source`. It implements `Source`'s 4 methods as follows: -`Spec` and `Check` are the `AbstractSource`'s simplest operations. - -`Spec` returns a checked in json schema file specifying the required configuration. The `AbstractSource` looks for a file named `spec.json` in the module's root by default. 
Here is an [example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-exchange-rates/source_exchange_rates/spec.json). - -`Check` delegates to the `AbstractSource`'s `check_connection` function. The function's `config` parameter contains the user-provided configuration, specified in the `spec.json` returned by `Spec`. `check_connection` uses this configuration to validate access and permissioning. Here is an [example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-exchange-rates/source_exchange_rates/source.py#L90) from the same Exchange Rates API. +`Check` delegates to the `AbstractSource`'s `check_connection` function. The function's `config` parameter contains the user-provided configuration, specified in the `spec.yaml` returned by `Spec`. `check_connection` uses this configuration to validate access and permissioning. Here is an [example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-exchange-rates/source_exchange_rates/source.py#L90) from the same Exchange Rates API. ### The `Stream` Abstract Base Class @@ -58,7 +56,7 @@ A summary of what we've covered so far on how to use the Airbyte CDK: * This involves, 1. implementing the `check_connection`function. 2. Creating the appropriate `Stream` classes and returning them in the `streams` function. - 3. placing the above mentioned `spec.json` file in the right place. + 3. placing the above mentioned `spec.yaml` file in the right place. ## HTTP Streams diff --git a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md b/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md index 0e6f3c8e088f..ca63ca12844d 100644 --- a/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md +++ b/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md @@ -24,7 +24,7 @@ Create `acceptance-test-config.yml`. In most cases, your connector already has t connector_image: airbyte/source-some-connector:dev tests: spec: - - spec_path: "some_folder/spec.json" + - spec_path: "some_folder/spec.yaml" ``` Build your connector image if needed. @@ -95,10 +95,10 @@ tests: # Tests configuration Verify that a spec operation issued to the connector returns a valid spec. -| Input | Type | Default | Note | -| :--- | :--- | :--- | :--- | -| `spec_path` | string | `secrets/spec.json` | Path to a JSON object representing the spec expected to be output by this connector | -| `timeout_seconds` | int | 10 | Test execution timeout in seconds | +| Input | Type | Default | Note | +| :--- | :--- | :--- |:-------------------------------------------------------------------------------------------------| +| `spec_path` | string | `secrets/spec.json` | Path to a YAML or JSON file representing the spec expected to be output by this connector | +| `timeout_seconds` | int | 10 | Test execution timeout in seconds | ## Test Connection diff --git a/docs/connector-development/tutorials/building-a-python-source.md b/docs/connector-development/tutorials/building-a-python-source.md index 7aeaa820f9e9..c3162d02a0fa 100644 --- a/docs/connector-development/tutorials/building-a-python-source.md +++ b/docs/connector-development/tutorials/building-a-python-source.md @@ -21,7 +21,7 @@ All the commands below assume that `python` points to a version of python >3. 
* Step 1: Create the source using template * Step 2: Build the newly generated source * Step 3: Set up your Airbyte development environment -* Step 4: Implement `spec` \(and define the specification for the source `airbyte-integrations/connectors/source-/spec.json`\) +* Step 4: Implement `spec` \(and define the specification for the source `airbyte-integrations/connectors/source-/spec.yaml`\) * Step 5: Implement `check` * Step 6: Implement `discover` * Step 7: Implement `read` @@ -157,15 +157,15 @@ The nice thing about this approach is that you are running your source exactly a ### Step 4: Implement `spec` -Each source contains a specification that describes what inputs it needs in order for it to pull data. This file can be found in `airbyte-integrations/connectors/source-/spec.json`. This is a good place to start when developing your source. Using JsonSchema define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json) of what the `spec.json` looks like for the postgres source. +Each source contains a specification that describes what inputs it needs in order for it to pull data. This file can be found in `airbyte-integrations/connectors/source-/spec.yaml`. This is a good place to start when developing your source. Using JsonSchema, define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) of what the `spec.yaml` looks like for the Stripe source. For more details on what the spec is, you can read about the Airbyte Protocol [here](../../understanding-airbyte/airbyte-specification.md). -The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.json` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.json` you should be done with this step. +The generated code that Airbyte provides handles implementing the `spec` method for you. It assumes that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.yaml`, you should be done with this step. ### Step 5: Implement `check` -As described in the template code, this method takes in a json object called config that has the values described in the `spec.json` filled in. In other words if the `spec.json` said that the source requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`. It returns a json object that reports, given the credentials in the config, whether we were able to connect to the source. For example, with the given credentials could the source connect to the database server. +As described in the template code, this method takes in a json object called config that has the values described in the `spec.yaml` filled in. In other words, if the `spec.yaml` said that the source requires a `username` and `password`, the config object might be `{ "username": "airbyte", "password": "password123" }`. It returns a json object that reports, given the credentials in the config, whether we were able to connect to the source. For example, with the given credentials, could the source connect to the database server? While developing, we recommend storing this object in `secrets/config.json`. 
The `secrets` directory is gitignored by default. diff --git a/docs/connector-development/tutorials/cdk-speedrun.md b/docs/connector-development/tutorials/cdk-speedrun.md index 552c1bf532a9..56977c1889ac 100644 --- a/docs/connector-development/tutorials/cdk-speedrun.md +++ b/docs/connector-development/tutorials/cdk-speedrun.md @@ -38,27 +38,26 @@ pip install -r requirements.txt cd source_python_http_example ``` -We're working with the PokeAPI, so we need to define our input schema to reflect that. Open the `spec.json` file here and replace it with: - -```javascript -{ - "documentationUrl": "https://docs.airbyte.io/integrations/sources/pokeapi", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Pokeapi Spec", - "type": "object", - "required": ["pokemon_name"], - "additionalProperties": false, - "properties": { - "pokemon_name": { - "type": "string", - "description": "Pokemon requested from the API.", - "pattern": "^[a-z0-9_\\-]+$", - "examples": ["ditto, luxray, snorlax"] - } - } - } -} +We're working with the PokeAPI, so we need to define our input schema to reflect that. Open the `spec.yaml` file here and replace it with: + +```yaml +documentationUrl: https://docs.airbyte.io/integrations/sources/pokeapi +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Pokeapi Spec + type: object + required: + - pokemon_name + additionalProperties: false + properties: + pokemon_name: + type: string + description: Pokemon requested from the API. + pattern: ^[a-z0-9_\-]+$ + examples: + - ditto + - luxray + - snorlax ``` As you can see, we have one input to our input schema, which is `pokemon_name`, which is required. Normally, input schemas will contain information such as API keys and client secrets that need to get passed down to all endpoints or streams. diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md index f9f52b896b8c..e4df89230e20 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/3-define-inputs.md @@ -2,38 +2,37 @@ Each connector declares the inputs it needs to read data from the underlying data source. This is the Airbyte Protocol's `spec` operation. -The simplest way to implement this is by creating a `.json` file in `source_/spec.json` which describes your connector's inputs according to the [ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml#L257) schema. This is a good place to start when developing your source. Using JsonSchema, define what the inputs are \(e.g. username and password\). Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/spec.json) of what the `spec.json` looks like for the Freshdesk API source. +The simplest way to implement this is by creating a `spec.yaml` file in `source_/spec.yaml` which describes your connector's inputs according to the [ConnectorSpecification](https://github.com/airbytehq/airbyte/blob/master/airbyte-protocol/models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml#L257) schema. This is a good place to start when developing your source. Using JsonSchema, define what the inputs are \(e.g. username and password\). 
Here's [an example](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/spec.yaml) of what the `spec.yaml` looks like for the Stripe API source. For more details on what the spec is, you can read about the Airbyte Protocol [here](https://docs.airbyte.io/understanding-airbyte/airbyte-specification). -The generated code that Airbyte provides, handles implementing the `spec` method for you. It assumes that there will be a file called `spec.json` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.json` you should be done with this step. - -Given that we'll pulling currency data for our example source, we'll define the following `spec.json`: - -```text -{ - "documentationUrl": "https://docs.airbyte.io/integrations/sources/exchangeratesapi", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Python Http Tutorial Spec", - "type": "object", - "required": ["start_date", "base"], - "additionalProperties": false, - "properties": { - "start_date": { - "type": "string", - "description": "Start getting data from that date.", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "examples": ["%Y-%m-%d"] - }, - "base": { - "type": "string", - "examples": ["USD", "EUR"], - "description": "ISO reference currency. See here." - } - } - } -} +The generated code that Airbyte provides handles implementing the `spec` method for you. It assumes that there will be a file called `spec.yaml` in the same directory as `source.py`. If you have declared the necessary JsonSchema in `spec.yaml`, you should be done with this step. + +Given that we'll be pulling currency data for our example source, we'll define the following `spec.yaml`: + +```yaml +documentationUrl: https://docs.airbyte.io/integrations/sources/exchangeratesapi +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Python Http Tutorial Spec + type: object + required: + - start_date + - base + additionalProperties: false + properties: + start_date: + type: string + description: Start getting data from that date. + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + examples: + - "%Y-%m-%d" + base: + type: string + examples: + - USD + - EUR + description: "ISO reference currency. See here." ``` In addition to metadata, we define two inputs: diff --git a/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md b/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md index cb037c36a1b8..367dcef39ce8 100644 --- a/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md +++ b/docs/connector-development/tutorials/cdk-tutorial-python-http/4-connection-checking.md @@ -2,7 +2,7 @@ The second operation in the Airbyte Protocol that we'll implement is the `check` operation. -This operation verifies that the input configuration supplied by the user can be used to connect to the underlying data source. Note that this user-supplied configuration has the values described in the `spec.json` filled in. In other words if the `spec.json` said that the source requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`. You should then implement something that returns a json object reporting, given the credentials in the config, whether we were able to connect to the source. 
+This operation verifies that the input configuration supplied by the user can be used to connect to the underlying data source. Note that this user-supplied configuration has the values described in the `spec.yaml` filled in. In other words, if the `spec.yaml` said that the source requires a `username` and `password` the config object might be `{ "username": "airbyte", "password": "password123" }`. You should then implement something that returns a json object reporting, given the credentials in the config, whether we were able to connect to the source. In our case, this is a fairly trivial check since the API requires no credentials. Instead, let's verify that the user-input `base` currency is a legitimate currency. In `source.py` we'll find the following autogenerated source: @@ -16,7 +16,7 @@ class SourcePythonHttpTutorial(AbstractSource): See https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/connectors/source-stripe/source_stripe/source.py#L232 for an example. - :param config: the user-input config object conforming the connector's spec.json + :param config: the user-input config object conforming to the connector's spec.yaml :param logger: logger object :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. """ From 367b863ed2e4215690766743a6c488ba0b0512b1 Mon Sep 17 00:00:00 2001 From: Subodh Kant Chaturvedi Date: Tue, 26 Apr 2022 19:49:50 +0530 Subject: [PATCH 04/39] implement migration to create workspace_service_account table (#11943) * implement migration to create workspace_service_account table * make all columns non nullable * introduce persistence code for service account table (#11944) * implement persistence code for workspace_service_account table * update yaml * implement secret handling for workspace_service_account table (#11946) * implement secret handling for workspace_service_account table * add new line to the mock json * get rid of file * address review comments * update method name and add comment --- .../airbyte/bootloader/BootloaderAppTest.java | 2 +- .../java/io/airbyte/config/ConfigSchema.java | 5 + .../types/WorkspaceServiceAccount.yaml | 31 ++++ airbyte-config/persistence/build.gradle | 1 + .../config/persistence/ConfigRepository.java | 12 ++ .../DatabaseConfigPersistence.java | 86 +++++++++ .../config/persistence/DbConverter.java | 13 ++ .../persistence/SecretsRepositoryReader.java | 15 ++ .../persistence/SecretsRepositoryWriter.java | 59 +++++++ .../split_secrets/NoOpSecretsHydrator.java | 5 + .../split_secrets/RealSecretsHydrator.java | 5 + .../SecretCoordinateToPayload.java | 13 ++ .../split_secrets/SecretsHelpers.java | 71 +++++++- .../split_secrets/SecretsHydrator.java | 8 + .../BaseDatabaseConfigPersistenceTest.java | 2 +- ...baseConfigPersistenceE2EReadWriteTest.java | 17 ++ .../airbyte/config/persistence/MockData.java | 52 ++++++ .../SecretsRepositoryReaderTest.java | 103 +++++++++++ .../SecretsRepositoryWriterTest.java | 166 ++++++++++++++++++ ...1__CreateWorkspaceServiceAccountTable.java | 63 +++++++ .../configs_database/schema_dump.txt | 22 +++ 21 files changed, 743 insertions(+), 8 deletions(-) create mode 100644 airbyte-config/models/src/main/resources/types/WorkspaceServiceAccount.yaml create mode 100644 airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java create mode 100644
airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java index 310e12468c5c..243bbb8227b7 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java @@ -80,7 +80,7 @@ void testBootloaderAppBlankDb() throws Exception { mockedConfigs.getConfigDatabaseUrl()) .getAndInitialize(); val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, this.getClass().getName()); - assertEquals("0.35.59.004", configsMigrator.getLatestMigration().getVersion().getVersion()); + assertEquals("0.35.65.001", configsMigrator.getLatestMigration().getVersion().getVersion()); val jobsPersistence = new DefaultJobPersistence(jobDatabase); assertEquals(version, jobsPersistence.getVersion().get()); diff --git a/airbyte-config/models/src/main/java/io/airbyte/config/ConfigSchema.java b/airbyte-config/models/src/main/java/io/airbyte/config/ConfigSchema.java index 4d2296a0cbde..5ad284858ebb 100644 --- a/airbyte-config/models/src/main/java/io/airbyte/config/ConfigSchema.java +++ b/airbyte-config/models/src/main/java/io/airbyte/config/ConfigSchema.java @@ -17,6 +17,11 @@ public enum ConfigSchema implements AirbyteConfig { standardWorkspace -> standardWorkspace.getWorkspaceId().toString(), "workspaceId"), + WORKSPACE_SERVICE_ACCOUNT("WorkspaceServiceAccount.yaml", + WorkspaceServiceAccount.class, + workspaceServiceAccount -> workspaceServiceAccount.getWorkspaceId().toString(), + "workspaceId"), + // source STANDARD_SOURCE_DEFINITION("StandardSourceDefinition.yaml", StandardSourceDefinition.class, diff --git a/airbyte-config/models/src/main/resources/types/WorkspaceServiceAccount.yaml b/airbyte-config/models/src/main/resources/types/WorkspaceServiceAccount.yaml new file mode 100644 index 000000000000..f18e2183edb0 --- /dev/null +++ b/airbyte-config/models/src/main/resources/types/WorkspaceServiceAccount.yaml @@ -0,0 +1,31 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/WorkspaceServiceAccount.yaml +title: WorkspaceServiceAccount +description: service account attached to a workspace +type: object +required: + - workspaceId + - serviceAccountId + - serviceAccountEmail + - jsonCredential + - hmacKey +additionalProperties: false +properties: + workspaceId: + type: string + format: uuid + serviceAccountId: + type: string + serviceAccountEmail: + type: string + jsonCredential: + # Ref : io.airbyte.config.persistence.MockData#workspaceServiceAccounts() for sample data + description: Represents the JSON key generated for the service account + type: object + existingJavaType: com.fasterxml.jackson.databind.JsonNode + hmacKey: + # Ref : io.airbyte.config.persistence.MockData#workspaceServiceAccounts() for sample data + description: Represents the secret and access id of generated HMAC key for the service account + type: object + existingJavaType: com.fasterxml.jackson.databind.JsonNode diff --git a/airbyte-config/persistence/build.gradle b/airbyte-config/persistence/build.gradle index 1f92dda1491e..8fe422a4fa67 100644 --- a/airbyte-config/persistence/build.gradle +++ b/airbyte-config/persistence/build.gradle @@ -15,6 +15,7 @@ dependencies { implementation 
'commons-io:commons-io:2.7' implementation 'com.google.cloud:google-cloud-secretmanager:2.0.5' + testImplementation 'org.hamcrest:hamcrest-all:1.3' testImplementation "org.testcontainers:postgresql:1.15.3" testImplementation project(':airbyte-test-utils') integrationTestJavaImplementation project(':airbyte-config:persistence') diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index ceb15ad955b8..c5c208b8a8ba 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -37,6 +37,7 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; import io.airbyte.db.instance.configs.jooq.enums.ActorType; @@ -970,4 +971,15 @@ private Condition includeTombstones(final Field tombstoneField, final b } } + public WorkspaceServiceAccount getWorkspaceServiceAccountNoSecrets(final UUID workspaceId) + throws JsonValidationException, IOException, ConfigNotFoundException { + return persistence.getConfig(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, workspaceId.toString(), WorkspaceServiceAccount.class); + } + + public void writeWorkspaceServiceAccountNoSecrets(final WorkspaceServiceAccount workspaceServiceAccount) + throws JsonValidationException, IOException { + persistence.writeConfig(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, workspaceServiceAccount.getWorkspaceId().toString(), + workspaceServiceAccount); + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java index 3462179c6db8..ef3fc2a0636a 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DatabaseConfigPersistence.java @@ -14,6 +14,7 @@ import static io.airbyte.db.instance.configs.jooq.Tables.OPERATION; import static io.airbyte.db.instance.configs.jooq.Tables.STATE; import static io.airbyte.db.instance.configs.jooq.Tables.WORKSPACE; +import static io.airbyte.db.instance.configs.jooq.Tables.WORKSPACE_SERVICE_ACCOUNT; import static org.jooq.impl.DSL.asterisk; import static org.jooq.impl.DSL.select; @@ -45,6 +46,7 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.config.persistence.split_secrets.JsonSecretsProcessor; import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; @@ -127,6 +129,8 @@ public T getConfig(final AirbyteConfig configType, final String configId, fi return (T) getActorCatalog(configId); } else if (configType == ConfigSchema.ACTOR_CATALOG_FETCH_EVENT) { return (T) getActorCatalogFetchEvent(configId); + } else if (configType == ConfigSchema.WORKSPACE_SERVICE_ACCOUNT) { + return (T) getWorkspaceServiceAccount(configId); } else { throw new IllegalArgumentException("Unknown Config Type " + configType); } @@ -138,6 +142,12 @@ private StandardWorkspace getStandardWorkspace(final String configId) throws 
IOE return result.get(0).getConfig(); } + private WorkspaceServiceAccount getWorkspaceServiceAccount(final String configId) throws IOException, ConfigNotFoundException { + final List> result = listWorkspaceServiceAccountWithMetadata(Optional.of(UUID.fromString(configId))); + validate(configId, result, ConfigSchema.WORKSPACE_SERVICE_ACCOUNT); + return result.get(0).getConfig(); + } + private StandardSourceDefinition getStandardSourceDefinition(final String configId) throws IOException, ConfigNotFoundException { final List> result = listStandardSourceDefinitionWithMetadata(Optional.of(UUID.fromString(configId))); @@ -272,6 +282,8 @@ public ConfigWithMetadata getConfigWithMetadata(final AirbyteConfig confi return (ConfigWithMetadata) validateAndReturn(configId, listActorCatalogWithMetadata(configIdOpt), configType); } else if (configType == ConfigSchema.ACTOR_CATALOG_FETCH_EVENT) { return (ConfigWithMetadata) validateAndReturn(configId, listActorCatalogFetchEventWithMetadata(configIdOpt), configType); + } else if (configType == ConfigSchema.WORKSPACE_SERVICE_ACCOUNT) { + return (ConfigWithMetadata) validateAndReturn(configId, listWorkspaceServiceAccountWithMetadata(configIdOpt), configType); } else { throw new IllegalArgumentException("Unknown Config Type " + configType); } @@ -304,6 +316,8 @@ public List> listConfigsWithMetadata(final AirbyteConf listActorCatalogWithMetadata().forEach(c -> configWithMetadata.add((ConfigWithMetadata) c)); } else if (configType == ConfigSchema.ACTOR_CATALOG_FETCH_EVENT) { listActorCatalogFetchEventWithMetadata().forEach(c -> configWithMetadata.add((ConfigWithMetadata) c)); + } else if (configType == ConfigSchema.WORKSPACE_SERVICE_ACCOUNT) { + listWorkspaceServiceAccountWithMetadata().forEach(c -> configWithMetadata.add((ConfigWithMetadata) c)); } else { throw new IllegalArgumentException("Unknown Config Type " + configType); } @@ -337,6 +351,33 @@ private List> listStandardWorkspaceWithMet return standardWorkspaces; } + private List> listWorkspaceServiceAccountWithMetadata() throws IOException { + return listWorkspaceServiceAccountWithMetadata(Optional.empty()); + } + + private List> listWorkspaceServiceAccountWithMetadata(final Optional configId) + throws IOException { + final Result result = database.query(ctx -> { + final SelectJoinStep query = ctx.select(asterisk()).from(WORKSPACE_SERVICE_ACCOUNT); + if (configId.isPresent()) { + return query.where(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID.eq(configId.get())).fetch(); + } + return query.fetch(); + }); + + final List> workspaceServiceAccounts = new ArrayList<>(); + for (final Record record : result) { + final WorkspaceServiceAccount workspaceServiceAccount = DbConverter.buildWorkspaceServiceAccount(record); + workspaceServiceAccounts.add(new ConfigWithMetadata<>( + record.get(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID).toString(), + ConfigSchema.WORKSPACE_SERVICE_ACCOUNT.name(), + record.get(WORKSPACE_SERVICE_ACCOUNT.CREATED_AT).toInstant(), + record.get(WORKSPACE_SERVICE_ACCOUNT.UPDATED_AT).toInstant(), + workspaceServiceAccount)); + } + return workspaceServiceAccounts; + } + private List> listStandardSourceDefinitionWithMetadata() throws IOException { return listStandardSourceDefinitionWithMetadata(Optional.empty()); } @@ -697,6 +738,8 @@ public void writeConfig(final AirbyteConfig configType, final String configI writeActorCatalog(Collections.singletonList((ActorCatalog) config)); } else if (configType == ConfigSchema.ACTOR_CATALOG_FETCH_EVENT) { 
writeActorCatalogFetchEvent(Collections.singletonList((ActorCatalogFetchEvent) config)); + } else if (configType == ConfigSchema.WORKSPACE_SERVICE_ACCOUNT) { + writeWorkspaceServiceAccount(Collections.singletonList((WorkspaceServiceAccount) config)); } else { throw new IllegalArgumentException("Unknown Config Type " + configType); } @@ -758,6 +801,44 @@ private void writeStandardWorkspace(final List configs, final }); } + private void writeWorkspaceServiceAccount(final List configs) throws IOException { + database.transaction(ctx -> { + writeWorkspaceServiceAccount(configs, ctx); + return null; + }); + } + + private void writeWorkspaceServiceAccount(final List configs, final DSLContext ctx) { + final OffsetDateTime timestamp = OffsetDateTime.now(); + configs.forEach((workspaceServiceAccount) -> { + final boolean isExistingConfig = ctx.fetchExists(select() + .from(WORKSPACE_SERVICE_ACCOUNT) + .where(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID.eq(workspaceServiceAccount.getWorkspaceId()))); + + if (isExistingConfig) { + ctx.update(WORKSPACE_SERVICE_ACCOUNT) + .set(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID, workspaceServiceAccount.getWorkspaceId()) + .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_ID, workspaceServiceAccount.getServiceAccountId()) + .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_EMAIL, workspaceServiceAccount.getServiceAccountEmail()) + .set(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getJsonCredential()))) + .set(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getHmacKey()))) + .set(WORKSPACE_SERVICE_ACCOUNT.UPDATED_AT, timestamp) + .where(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID.eq(workspaceServiceAccount.getWorkspaceId())) + .execute(); + } else { + ctx.insertInto(WORKSPACE_SERVICE_ACCOUNT) + .set(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID, workspaceServiceAccount.getWorkspaceId()) + .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_ID, workspaceServiceAccount.getServiceAccountId()) + .set(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_EMAIL, workspaceServiceAccount.getServiceAccountEmail()) + .set(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getJsonCredential()))) + .set(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY, JSONB.valueOf(Jsons.serialize(workspaceServiceAccount.getHmacKey()))) + .set(WORKSPACE_SERVICE_ACCOUNT.CREATED_AT, timestamp) + .set(WORKSPACE_SERVICE_ACCOUNT.UPDATED_AT, timestamp) + .execute(); + } + }); + } + private void writeStandardSourceDefinition(final List configs) throws IOException { database.transaction(ctx -> { ConfigWriter.writeStandardSourceDefinition(configs, ctx); @@ -1190,6 +1271,8 @@ public void writeConfigs(final AirbyteConfig configType, final Map (ActorCatalog) c).collect(Collectors.toList())); } else if (configType == ConfigSchema.ACTOR_CATALOG_FETCH_EVENT) { writeActorCatalogFetchEvent(configs.values().stream().map(c -> (ActorCatalogFetchEvent) c).collect(Collectors.toList())); + } else if (configType == ConfigSchema.WORKSPACE_SERVICE_ACCOUNT) { + writeWorkspaceServiceAccount(configs.values().stream().map(c -> (WorkspaceServiceAccount) c).collect(Collectors.toList())); } else { throw new IllegalArgumentException("Unknown Config Type " + configType); } @@ -1221,6 +1304,8 @@ public void deleteConfig(final AirbyteConfig configType, final String configId) deleteConfig(ACTOR_CATALOG, ACTOR_CATALOG.ID, UUID.fromString(configId)); } else if (configType == ConfigSchema.ACTOR_CATALOG_FETCH_EVENT) { 
deleteConfig(ACTOR_CATALOG_FETCH_EVENT, ACTOR_CATALOG_FETCH_EVENT.ID, UUID.fromString(configId)); + } else if (configType == ConfigSchema.WORKSPACE_SERVICE_ACCOUNT) { + deleteConfig(WORKSPACE_SERVICE_ACCOUNT, WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID, UUID.fromString(configId)); } else { throw new IllegalArgumentException("Unknown Config Type " + configType); } @@ -1278,6 +1363,7 @@ public void replaceAllConfigs(final Map> configs, final ctx.truncate(STATE).restartIdentity().cascade().execute(); ctx.truncate(ACTOR_CATALOG).restartIdentity().cascade().execute(); ctx.truncate(ACTOR_CATALOG_FETCH_EVENT).restartIdentity().cascade().execute(); + ctx.truncate(WORKSPACE_SERVICE_ACCOUNT).restartIdentity().cascade().execute(); if (configs.containsKey(ConfigSchema.STANDARD_WORKSPACE)) { configs.get(ConfigSchema.STANDARD_WORKSPACE).map(c -> (StandardWorkspace) c) diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java index c78acc55c1e4..1bad92ebaf6b 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/DbConverter.java @@ -9,6 +9,7 @@ import static io.airbyte.db.instance.configs.jooq.Tables.ACTOR_OAUTH_PARAMETER; import static io.airbyte.db.instance.configs.jooq.Tables.CONNECTION; import static io.airbyte.db.instance.configs.jooq.Tables.WORKSPACE; +import static io.airbyte.db.instance.configs.jooq.Tables.WORKSPACE_SERVICE_ACCOUNT; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; @@ -26,6 +27,7 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConnectorSpecification; import java.io.IOException; @@ -147,4 +149,15 @@ public static ActorCatalog buildActorCatalog(final Record record) { .withCatalogHash(record.get(ACTOR_CATALOG.CATALOG_HASH)); } + public static WorkspaceServiceAccount buildWorkspaceServiceAccount(final Record record) { + return new WorkspaceServiceAccount() + .withWorkspaceId(record.get(WORKSPACE_SERVICE_ACCOUNT.WORKSPACE_ID)) + .withServiceAccountId(record.get(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_ID)) + .withServiceAccountEmail(record.get(WORKSPACE_SERVICE_ACCOUNT.SERVICE_ACCOUNT_EMAIL)) + .withJsonCredential(record.get(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL) == null ? null + : Jsons.deserialize(record.get(WORKSPACE_SERVICE_ACCOUNT.JSON_CREDENTIAL).data())) + .withHmacKey(record.get(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY) == null ? 
null + : Jsons.deserialize(record.get(WORKSPACE_SERVICE_ACCOUNT.HMAC_KEY).data())); + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java index 525743e55907..6040734e3154 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/SecretsRepositoryReader.java @@ -10,6 +10,7 @@ import io.airbyte.config.ConfigSchema; import io.airbyte.config.DestinationConnection; import io.airbyte.config.SourceConnection; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.config.persistence.split_secrets.SecretsHydrator; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -96,4 +97,18 @@ private void hydrateValuesIfKeyPresent(final String key, final Map> configs, final } } + public void writeServiceAccountJsonCredentials(final WorkspaceServiceAccount workspaceServiceAccount) + throws JsonValidationException, IOException { + final WorkspaceServiceAccount workspaceServiceAccountForDB = getWorkspaceServiceAccountWithSecretCoordinate(workspaceServiceAccount); + configRepository.writeWorkspaceServiceAccountNoSecrets(workspaceServiceAccountForDB); + } + + /** + * This method is to encrypt the secret JSON key and HMAC key of a GCP service account a associated + * with a workspace. If in future we build a similar feature i.e. an AWS account associated with a + * workspace, we will have to build new implementation for it + */ + private WorkspaceServiceAccount getWorkspaceServiceAccountWithSecretCoordinate(final WorkspaceServiceAccount workspaceServiceAccount) + throws JsonValidationException, IOException { + if (longLivedSecretPersistence.isPresent()) { + final WorkspaceServiceAccount clonedWorkspaceServiceAccount = Jsons.clone(workspaceServiceAccount); + final Optional optionalWorkspaceServiceAccount = getOptionalWorkspaceServiceAccount( + workspaceServiceAccount.getWorkspaceId()); + // Convert the JSON key of Service Account into secret co-oridnate. Ref : + // https://cloud.google.com/iam/docs/service-accounts#key-types + if (workspaceServiceAccount.getJsonCredential() != null) { + final SecretCoordinateToPayload jsonCredSecretCoordinateToPayload = + SecretsHelpers.convertServiceAccountCredsToSecret(workspaceServiceAccount.getJsonCredential().toPrettyString(), + longLivedSecretPersistence.get(), + workspaceServiceAccount.getWorkspaceId(), + UUID::randomUUID, + optionalWorkspaceServiceAccount.map(WorkspaceServiceAccount::getJsonCredential).orElse(null), + "json"); + longLivedSecretPersistence.get().write(jsonCredSecretCoordinateToPayload.secretCoordinate(), jsonCredSecretCoordinateToPayload.payload()); + clonedWorkspaceServiceAccount.setJsonCredential(jsonCredSecretCoordinateToPayload.secretCoordinateForDB()); + } + // Convert the HMAC key of Service Account into secret co-oridnate. 
Ref : + // https://cloud.google.com/storage/docs/authentication/hmackeys + if (workspaceServiceAccount.getHmacKey() != null) { + final SecretCoordinateToPayload hmackKeySecretCoordinateToPayload = + SecretsHelpers.convertServiceAccountCredsToSecret(workspaceServiceAccount.getHmacKey().toString(), + longLivedSecretPersistence.get(), + workspaceServiceAccount.getWorkspaceId(), + UUID::randomUUID, + optionalWorkspaceServiceAccount.map(WorkspaceServiceAccount::getHmacKey).orElse(null), + "hmac"); + longLivedSecretPersistence.get().write(hmackKeySecretCoordinateToPayload.secretCoordinate(), hmackKeySecretCoordinateToPayload.payload()); + clonedWorkspaceServiceAccount.setHmacKey(hmackKeySecretCoordinateToPayload.secretCoordinateForDB()); + } + return clonedWorkspaceServiceAccount; + } + return workspaceServiceAccount; + } + + public Optional getOptionalWorkspaceServiceAccount(final UUID workspaceId) + throws JsonValidationException, IOException { + try { + return Optional.of(configRepository.getWorkspaceServiceAccountNoSecrets(workspaceId)); + } catch (ConfigNotFoundException e) { + return Optional.empty(); + } + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java index b2185c56093c..bb02572bc1bb 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/NoOpSecretsHydrator.java @@ -16,4 +16,9 @@ public JsonNode hydrate(final JsonNode partialConfig) { return partialConfig; } + @Override + public JsonNode hydrateSecretCoordinate(JsonNode secretCoordinate) { + return secretCoordinate; + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java index cfdffb0f0057..69e3d07bdd08 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/RealSecretsHydrator.java @@ -22,4 +22,9 @@ public JsonNode hydrate(final JsonNode partialConfig) { return SecretsHelpers.combineConfig(partialConfig, readOnlySecretPersistence); } + @Override + public JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinate) { + return SecretsHelpers.hydrateSecretCoordinate(secretCoordinate, readOnlySecretPersistence); + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java new file mode 100644 index 000000000000..41b9ca584809 --- /dev/null +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretCoordinateToPayload.java @@ -0,0 +1,13 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.config.persistence.split_secrets; + +import com.fasterxml.jackson.databind.JsonNode; + +public record SecretCoordinateToPayload(SecretCoordinate secretCoordinate, + String payload, + JsonNode secretCoordinateForDB) { + +} diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java index 579ec254dd65..e4a3406228ad 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHelpers.java @@ -116,7 +116,7 @@ public static JsonNode combineConfig(final JsonNode partialConfig, final ReadOnl if (config.has(COORDINATE_FIELD)) { final var coordinateNode = config.get(COORDINATE_FIELD); final var coordinate = getCoordinateFromTextNode(coordinateNode); - return getOrThrowSecretValueNode(secretPersistence, coordinate); + return new TextNode(getOrThrowSecretValue(secretPersistence, coordinate)); } // otherwise iterate through all object fields @@ -336,16 +336,16 @@ private static JsonNode getFieldOrEmptyNode(final JsonNode node, final int field * @param secretPersistence storage layer for secrets * @param coordinate reference to a secret in the persistence * @throws RuntimeException when a secret at that coordinate is not available in the persistence - * @return a json text node containing the secret value + * @return a json string containing the secret value or a JSON */ - private static TextNode getOrThrowSecretValueNode(final ReadOnlySecretPersistence secretPersistence, final SecretCoordinate coordinate) { + private static String getOrThrowSecretValue(final ReadOnlySecretPersistence secretPersistence, + final SecretCoordinate coordinate) { final var secretValue = secretPersistence.read(coordinate); if (secretValue.isEmpty()) { throw new RuntimeException(String.format("That secret was not found in the store! 
Coordinate: %s", coordinate.getFullCoordinate())); } - - return new TextNode(secretValue.get()); + return secretValue.get(); } private static SecretCoordinate getCoordinateFromTextNode(final JsonNode node) { @@ -379,6 +379,15 @@ protected static SecretCoordinate getCoordinate( final UUID workspaceId, final Supplier uuidSupplier, final @Nullable String oldSecretFullCoordinate) { + return getSecretCoordinate("airbyte_workspace_", newSecret, secretReader, workspaceId, uuidSupplier, oldSecretFullCoordinate); + } + + private static SecretCoordinate getSecretCoordinate(final String secretBasePrefix, + final String newSecret, + final ReadOnlySecretPersistence secretReader, + final UUID secretBaseId, + final Supplier uuidSupplier, + final @Nullable String oldSecretFullCoordinate) { String coordinateBase = null; Long version = null; @@ -398,7 +407,7 @@ protected static SecretCoordinate getCoordinate( if (coordinateBase == null) { // IMPORTANT: format of this cannot be changed without introducing migrations for secrets // persistences - coordinateBase = "airbyte_workspace_" + workspaceId + "_secret_" + uuidSupplier.get(); + coordinateBase = secretBasePrefix + secretBaseId + "_secret_" + uuidSupplier.get(); } if (version == null) { @@ -408,4 +417,54 @@ protected static SecretCoordinate getCoordinate( return new SecretCoordinate(coordinateBase, version); } + /** + * This method takes in the key (JSON key or HMAC key) of a workspace service account as a secret + * and generates a co-ordinate for the secret so that the secret can be written in secret + * persistence at the generated co-ordinate + * + * @param newSecret The JSON key or HMAC key value + * @param secretReader To read the value from secret persistence for comparison with the new value + * @param workspaceId of the service account + * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy * + * fixture creation. + * @param oldSecretCoordinate a nullable full coordinate (base+version) retrieved from the * + * previous config + * @param keyType HMAC ot JSON key + * @return a coordinate (versioned reference to where the secret is stored in the persistence) + */ + public static SecretCoordinateToPayload convertServiceAccountCredsToSecret(final String newSecret, + final ReadOnlySecretPersistence secretReader, + final UUID workspaceId, + final Supplier uuidSupplier, + final @Nullable JsonNode oldSecretCoordinate, + final String keyType) { + final String oldSecretFullCoordinate = + (oldSecretCoordinate != null && oldSecretCoordinate.has(COORDINATE_FIELD)) ? 
oldSecretCoordinate.get(COORDINATE_FIELD).asText() + : null; + final SecretCoordinate coordinateForStagingConfig = getSecretCoordinate("service_account_" + keyType + "_", + newSecret, + secretReader, + workspaceId, + uuidSupplier, + oldSecretFullCoordinate); + return new SecretCoordinateToPayload(coordinateForStagingConfig, + newSecret, + Jsons.jsonNode(Map.of(COORDINATE_FIELD, + coordinateForStagingConfig.getFullCoordinate()))); + } + + /** + * Takes in the secret coordinate in form of a JSON and fetches the secret from the store + * + * @param secretCoordinateAsJson The co-ordinate at which we expect the secret value to be present + * in the secret persistence + * @param readOnlySecretPersistence The secret persistence + * @return Original secret value as JsonNode + */ + public static JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinateAsJson, + final ReadOnlySecretPersistence readOnlySecretPersistence) { + final var secretCoordinate = getCoordinateFromTextNode(secretCoordinateAsJson.get(COORDINATE_FIELD)); + return Jsons.deserialize(getOrThrowSecretValue(readOnlySecretPersistence, secretCoordinate)); + } + } diff --git a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java index 3c61c0b45638..808e6a7327a1 100644 --- a/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java +++ b/airbyte-config/persistence/src/main/java/io/airbyte/config/persistence/split_secrets/SecretsHydrator.java @@ -19,4 +19,12 @@ public interface SecretsHydrator { */ JsonNode hydrate(JsonNode partialConfig); + /** + * Takes in the secret coordinate in form of a JSON and fetches the secret from the store + * + * @param secretCoordinate The co-ordinate of the secret in the store in JSON format + * @return original secret value + */ + JsonNode hydrateSecretCoordinate(final JsonNode secretCoordinate); + } diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java index 87a9af309f59..b44070682e06 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/BaseDatabaseConfigPersistenceTest.java @@ -63,7 +63,7 @@ public static void dbDown() { protected static void truncateAllTables() throws SQLException { database.query(ctx -> ctx .execute( - "TRUNCATE TABLE state, actor_catalog, actor_catalog_fetch_event, connection_operation, connection, operation, actor_oauth_parameter, " + "TRUNCATE TABLE workspace_service_account, state, actor_catalog, actor_catalog_fetch_event, connection_operation, connection, operation, actor_oauth_parameter, " + "actor, actor_definition, actor_definition_workspace_grant, workspace")); } diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java index 9b688fd2553a..f05fbb986ff6 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java +++ 
b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/DatabaseConfigPersistenceE2EReadWriteTest.java @@ -23,6 +23,7 @@ import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.db.instance.configs.ConfigsDatabaseInstance; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.development.DevDatabaseMigrator; @@ -66,6 +67,7 @@ public void test() throws JsonValidationException, IOException, ConfigNotFoundEx standardSync(); standardSyncState(); standardActorCatalog(); + workspaceServiceAccounts(); deletion(); } @@ -80,6 +82,7 @@ private void deletion() throws ConfigNotFoundException, IOException, JsonValidat assertTrue(configPersistence.listConfigs(ConfigSchema.DESTINATION_CONNECTION, SourceConnection.class).isEmpty()); assertTrue(configPersistence.listConfigs(ConfigSchema.STANDARD_WORKSPACE, StandardWorkspace.class).isEmpty()); assertTrue(configPersistence.listConfigs(ConfigSchema.ACTOR_CATALOG_FETCH_EVENT, ActorCatalogFetchEvent.class).isEmpty()); + assertTrue(configPersistence.listConfigs(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, ActorCatalogFetchEvent.class).isEmpty()); assertFalse(configPersistence.listConfigs(ConfigSchema.SOURCE_OAUTH_PARAM, SourceOAuthParameter.class).isEmpty()); assertFalse(configPersistence.listConfigs(ConfigSchema.DESTINATION_OAUTH_PARAM, DestinationOAuthParameter.class).isEmpty()); @@ -301,4 +304,18 @@ public void standardActorCatalog() throws JsonValidationException, IOException, assertThat(MockData.actorCatalogFetchEvents()).hasSameElementsAs(actorCatalogFetchEvents); } + public void workspaceServiceAccounts() throws JsonValidationException, IOException, ConfigNotFoundException { + for (final WorkspaceServiceAccount expected : MockData.workspaceServiceAccounts()) { + configPersistence.writeConfig(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, expected.getWorkspaceId().toString(), + expected); + final WorkspaceServiceAccount actual = configPersistence.getConfig( + ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, expected.getWorkspaceId().toString(), WorkspaceServiceAccount.class); + assertEquals(expected, actual); + } + final List actorConfigurationBindings = configPersistence + .listConfigs(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, WorkspaceServiceAccount.class); + assertEquals(MockData.workspaceServiceAccounts().size(), actorConfigurationBindings.size()); + assertThat(MockData.workspaceServiceAccounts()).hasSameElementsAs(actorConfigurationBindings); + } + } diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/MockData.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/MockData.java index 27d733d100a8..dcd6c6416e1e 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/MockData.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/MockData.java @@ -4,6 +4,7 @@ package io.airbyte.config.persistence; +import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.config.ActorCatalog; @@ -33,6 +34,7 @@ import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.protocol.models.AuthSpecification; import 
io.airbyte.protocol.models.AuthSpecification.AuthType; @@ -47,7 +49,10 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.TreeMap; import java.util.UUID; +import java.util.stream.Collectors; public class MockData { @@ -88,6 +93,37 @@ public class MockData { private static final UUID ACTOR_CATALOG_FETCH_EVENT_ID_1 = UUID.randomUUID(); private static final UUID ACTOR_CATALOG_FETCH_EVENT_ID_2 = UUID.randomUUID(); + public static final String MOCK_SERVICE_ACCOUNT_1 = "{\n" + + " \"type\" : \"service_account\",\n" + + " \"project_id\" : \"random-gcp-project\",\n" + + " \"private_key_id\" : \"123a1234ab1a123ab12345678a1234ab1abc1a12\",\n" + + " \"private_key\" : \"-----BEGIN RSA PRIVATE KEY-----\\nMIIEoQIBAAKCAQBtkKBs9oe9pFhEWjBls9OrY0PXE/QN6nL4Bfw4+UqcBpTyItXo\\n3aBXuVqDIZ377zjbJUcYuc4NzAsLImy7VVT1XrdAkkCKQEMoA9pQgONA/3kD8Xff\\nSUGfdup8UJg925paaRhM7u81e3XKGwGyL/qcxpuHtfqimeWWfSPy5AawyOFl+l25\\nOqbm8PK4/QVqk4pcorQuISUkrehY0Ji0gVQF+ZeBvg7lvBtjNEl//eysGtcZvk7X\\nHqg+EIBqRjVNDsViHj0xeoDFcFgXDeWzxeQ0c7gMsDthfm4SjgaVFdQwsJUeoC6X\\nlwUoBbFIVVKW0n+SH+kxLc7mhaGjyRYJLS6tAgMBAAECggEAaowetlf4IR/VBoN+\\nVSjPSvg5XMr2pyG7tB597RngyGJOLjpaMx5zc1u4/ZSPghRdAh/6R71I+HnYs3dC\\nrdqJyCPXqV+Qi+F6bUtx3p+4X9kQ4hjMLcOboWuPFF1774vDSvCwxQAGd8gb//LL\\nb3DhEdzCGvOJTN7EOdhwQSAmsXsfj0qKlmm8vv0HBQDvjYYWhy/UcPry5sAGQ8KU\\nnUPTkz/OMS56nBIgKXgZtGRTP1Q7Q9a6oLmlvbDxuKGUByUPNlveZplzyWDO3RUN\\nNPt9dwgGk6rZK0umunGr0lq+WOK33Ue1RJy2VIvvV6dt32x20ehfVKND8N8q+wJ3\\neJQggQKBgQC//dOX8RwkmIloRzzmbu+qY8o44/F5gtxj8maR+OJhvbpFEID49bBr\\nzYqcMKfcgHJr6638CXVGSO66IiKtQcTMJ/Vd8TQVPcNPI1h/RD+wT/nkWX6R/0YH\\njwwNmikeUDH2/hLQlRZ8O45hc4frDGRMeHn3MSS2YsBDSl6YL/zHpQKBgQCSF9Ka\\nyCZmw5eS63G5/X9SVXbLRPuc6Fus+IbRPttOzSRviUXHaBjwwVEJgIKODx/eVXgD\\nA/OvFUmwIn73uZD/XgJrhkwAendaa+yhWKAkO5pO/EdAslxRmgxqTXfRcyslKBbo\\ns4YAgeYUgzOaMH4UxY4pJ7H6BLsFlboL+8BcaQKBgDSCM1Cm/M91eH8wnJNZW+r6\\nB+CvVueoxqX/MdZSf3fD8CHbdaqhZ3LUcEhvdjl0V9b0Sk1YON7UK5Z0p49DIZPE\\nifL7eQcmMTh/rkCAZfrOpMWzRE6hxoFiuiUuOHi17jRjILozTEcF8tbsRgwfA392\\no8Tbh/Lp5zOAL4bn+PaRAoGAZ2AgEJJsSe9BRB8CPF+aRoJfKvrHKIJqzHyXuVzH\\nBn22uI3kKHQKoeHJG/Ypa6hcHpFP+KJFPrDLkaz3NwfCCFFXWQqQoQ4Hgp43tPvn\\nZXwfdqChMrCDDuL4wgfLLxRVhVdWzpapzZYdXopwazzBGqWoMIr8LzRFum/2VCBy\\nP3ECgYBGqjuYud6gtrzaQwmMfcA0pSYsii96d2LKwWzjgcMzLxge59PIWXeQJqOb\\nh97m3qCkkPzbceD6Id8m/EyrNb04V8Zr0ERlcK/a4nRSHoIWQZY01lDSGhneRKn1\\nncBvRqCfz6ajf+zBg3zK0af98IHL0FI2NsNJLPrOBFMcthjx/g==\\n-----END RSA PRIVATE KEY-----\",\n" + + " \"client_email\" : \"a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com\",\n" + + " \"client_id\" : \"123456789012345678901\",\n" + + " \"auth_uri\" : \"https://blah.blah.com/x/blah1/blah\",\n" + + " \"token_uri\" : \"https://blah.blah.com/blah\",\n" + + " \"auth_provider_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah\",\n" + + " \"client_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah/a123/a1e5ac98-7531-48e1-943b-b46636%40random-gcp-project.abc.abcdefghijklmno.com\"\n" + + "}"; + + public static final String MOCK_SERVICE_ACCOUNT_2 = "{\n" + + " \"type\" : \"service_account-2\",\n" + + " \"project_id\" : \"random-gcp-project\",\n" + + " \"private_key_id\" : \"123a1234ab1a123ab12345678a1234ab1abc1a12\",\n" + + " \"private_key\" : \"-----BEGIN RSA PRIVATE 
KEY-----\\nMIIEoQIBAAKCAQBtkKBs9oe9pFhEWjBls9OrY0PXE/QN6nL4Bfw4+UqcBpTyItXo\\n3aBXuVqDIZ377zjbJUcYuc4NzAsLImy7VVT1XrdAkkCKQEMoA9pQgONA/3kD8Xff\\nSUGfdup8UJg925paaRhM7u81e3XKGwGyL/qcxpuHtfqimeWWfSPy5AawyOFl+l25\\nOqbm8PK4/QVqk4pcorQuISUkrehY0Ji0gVQF+ZeBvg7lvBtjNEl//eysGtcZvk7X\\nHqg+EIBqRjVNDsViHj0xeoDFcFgXDeWzxeQ0c7gMsDthfm4SjgaVFdQwsJUeoC6X\\nlwUoBbFIVVKW0n+SH+kxLc7mhaGjyRYJLS6tAgMBAAECggEAaowetlf4IR/VBoN+\\nVSjPSvg5XMr2pyG7tB597RngyGJOLjpaMx5zc1u4/ZSPghRdAh/6R71I+HnYs3dC\\nrdqJyCPXqV+Qi+F6bUtx3p+4X9kQ4hjMLcOboWuPFF1774vDSvCwxQAGd8gb//LL\\nb3DhEdzCGvOJTN7EOdhwQSAmsXsfj0qKlmm8vv0HBQDvjYYWhy/UcPry5sAGQ8KU\\nnUPTkz/OMS56nBIgKXgZtGRTP1Q7Q9a6oLmlvbDxuKGUByUPNlveZplzyWDO3RUN\\nNPt9dwgGk6rZK0umunGr0lq+WOK33Ue1RJy2VIvvV6dt32x20ehfVKND8N8q+wJ3\\neJQggQKBgQC//dOX8RwkmIloRzzmbu+qY8o44/F5gtxj8maR+OJhvbpFEID49bBr\\nzYqcMKfcgHJr6638CXVGSO66IiKtQcTMJ/Vd8TQVPcNPI1h/RD+wT/nkWX6R/0YH\\njwwNmikeUDH2/hLQlRZ8O45hc4frDGRMeHn3MSS2YsBDSl6YL/zHpQKBgQCSF9Ka\\nyCZmw5eS63G5/X9SVXbLRPuc6Fus+IbRPttOzSRviUXHaBjwwVEJgIKODx/eVXgD\\nA/OvFUmwIn73uZD/XgJrhkwAendaa+yhWKAkO5pO/EdAslxRmgxqTXfRcyslKBbo\\ns4YAgeYUgzOaMH4UxY4pJ7H6BLsFlboL+8BcaQKBgDSCM1Cm/M91eH8wnJNZW+r6\\nB+CvVueoxqX/MdZSf3fD8CHbdaqhZ3LUcEhvdjl0V9b0Sk1YON7UK5Z0p49DIZPE\\nifL7eQcmMTh/rkCAZfrOpMWzRE6hxoFiuiUuOHi17jRjILozTEcF8tbsRgwfA392\\no8Tbh/Lp5zOAL4bn+PaRAoGAZ2AgEJJsSe9BRB8CPF+aRoJfKvrHKIJqzHyXuVzH\\nBn22uI3kKHQKoeHJG/Ypa6hcHpFP+KJFPrDLkaz3NwfCCFFXWQqQoQ4Hgp43tPvn\\nZXwfdqChMrCDDuL4wgfLLxRVhVdWzpapzZYdXopwazzBGqWoMIr8LzRFum/2VCBy\\nP3ECgYBGqjuYud6gtrzaQwmMfcA0pSYsii96d2LKwWzjgcMzLxge59PIWXeQJqOb\\nh97m3qCkkPzbceD6Id8m/EyrNb04V8Zr0ERlcK/a4nRSHoIWQZY01lDSGhneRKn1\\nncBvRqCfz6ajf+zBg3zK0af98IHL0FI2NsNJLPrOBFMcthjx/g==\\n-----END RSA PRIVATE KEY-----\",\n" + + " \"client_email\" : \"a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com\",\n" + + " \"client_id\" : \"123456789012345678901\",\n" + + " \"auth_uri\" : \"https://blah.blah.com/x/blah1/blah\",\n" + + " \"token_uri\" : \"https://blah.blah.com/blah\",\n" + + " \"auth_provider_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah\",\n" + + " \"client_x509_cert_url\" : \"https://www.blah.com/blah/v1/blah/a123/a1e5ac98-7531-48e1-943b-b46636%40random-gcp-project.abc.abcdefghijklmno.com\"\n" + + "}"; + + public static final JsonNode HMAC_SECRET_PAYLOAD_1 = Jsons.jsonNode(sortMap( + Map.of("access_id", "ABCD1A1ABCDEFG1ABCDEFGH1ABC12ABCDEF1ABCDE1ABCDE1ABCDE12ABCDEF", "secret", "AB1AbcDEF//ABCDeFGHijKlmNOpqR1ABC1aBCDeF"))); + public static final JsonNode HMAC_SECRET_PAYLOAD_2 = Jsons.jsonNode(sortMap( + Map.of("access_id", "ABCD1A1ABCDEFG1ABCDEFGH1ABC12ABCDEF1ABCDE1ABCDE1ABCDE12ABCDEX", "secret", "AB1AbcDEF//ABCDeFGHijKlmNOpqR1ABC1aBCDeX"))); + private static final Instant NOW = Instant.parse("2021-12-15T20:30:40.00Z"); public static List standardWorkspaces() { @@ -546,6 +582,22 @@ public static List actorCatalogFetchEvents() { return Arrays.asList(actorCatalogFetchEvent1); } + public static List workspaceServiceAccounts() { + final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount() + .withWorkspaceId(WORKSPACE_ID_1) + .withHmacKey(HMAC_SECRET_PAYLOAD_1) + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") + .withJsonCredential(Jsons.deserialize(MOCK_SERVICE_ACCOUNT_1)); + + return Arrays.asList(workspaceServiceAccount); + } + + private static Map sortMap(Map originalMap) { + return originalMap.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, 
Map.Entry::getValue, (oldValue, newValue) -> newValue, TreeMap::new)); + } + public static Instant now() { return NOW; } diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java index 4d9c2211ac37..e02b66ce9716 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryReaderTest.java @@ -4,8 +4,11 @@ package io.airbyte.config.persistence; +import static io.airbyte.config.persistence.MockData.HMAC_SECRET_PAYLOAD_1; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; @@ -14,9 +17,11 @@ import io.airbyte.config.DestinationConnection; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; import io.airbyte.config.persistence.split_secrets.SecretCoordinate; +import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.config.persistence.split_secrets.SecretsHydrator; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -24,6 +29,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -116,4 +122,101 @@ void testDumpConfigsWithSecrets() throws IOException { assertEquals(expected, actual); } + @Test + public void testReadingServiceAccount() throws JsonValidationException, ConfigNotFoundException, IOException { + final ConfigRepository configRepository = mock(ConfigRepository.class); + final SecretPersistence secretPersistence = mock(SecretPersistence.class); + final RealSecretsHydrator realSecretsHydrator = new RealSecretsHydrator(secretPersistence); + final SecretsRepositoryReader secretsRepositoryReader = + spy(new SecretsRepositoryReader(configRepository, realSecretsHydrator)); + + final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); + + final String jsonSecretPayload = MockData.MOCK_SERVICE_ACCOUNT_1; + + final SecretCoordinate secretCoordinateHmac = new SecretCoordinate( + "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); + + final SecretCoordinate secretCoordinateJson = new SecretCoordinate( + "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_6b894c2b-71dc-4481-bd9f-572402643cf9", 1); + + doReturn(new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withHmacKey(Jsons.jsonNode( + Map.of("_secret", secretCoordinateHmac.getFullCoordinate()))).withJsonCredential(Jsons.jsonNode( + Map.of("_secret", secretCoordinateJson.getFullCoordinate()))) + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636")) + .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); + + 
doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(secretCoordinateHmac); + + doReturn(Optional.of(jsonSecretPayload)).when(secretPersistence).read(secretCoordinateJson); + + final WorkspaceServiceAccount actual = secretsRepositoryReader.getWorkspaceServiceAccountWithSecrets(workspaceId); + final WorkspaceServiceAccount expected = new WorkspaceServiceAccount().withWorkspaceId(workspaceId) + .withJsonCredential(Jsons.deserialize(jsonSecretPayload)).withHmacKey(HMAC_SECRET_PAYLOAD_1) + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com"); + assertEquals(expected, actual); + } + + @Test + public void testReadingServiceAccountWithJsonNull() throws JsonValidationException, ConfigNotFoundException, IOException { + final ConfigRepository configRepository = mock(ConfigRepository.class); + final SecretPersistence secretPersistence = mock(SecretPersistence.class); + final RealSecretsHydrator realSecretsHydrator = new RealSecretsHydrator(secretPersistence); + final SecretsRepositoryReader secretsRepositoryReader = + spy(new SecretsRepositoryReader(configRepository, realSecretsHydrator)); + + final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); + + final SecretCoordinate secretCoordinateHmac = new SecretCoordinate( + "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); + + doReturn(new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withHmacKey(Jsons.jsonNode( + Map.of("_secret", secretCoordinateHmac.getFullCoordinate()))) + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636")) + .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); + + doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(secretCoordinateHmac); + + final WorkspaceServiceAccount actual = secretsRepositoryReader.getWorkspaceServiceAccountWithSecrets(workspaceId); + final WorkspaceServiceAccount expected = new WorkspaceServiceAccount().withWorkspaceId(workspaceId) + .withHmacKey(HMAC_SECRET_PAYLOAD_1) + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com"); + assertEquals(expected, actual); + } + + @Test + public void testReadingServiceAccountWithHmacNull() throws JsonValidationException, ConfigNotFoundException, IOException { + final ConfigRepository configRepository = mock(ConfigRepository.class); + final SecretPersistence secretPersistence = mock(SecretPersistence.class); + final RealSecretsHydrator realSecretsHydrator = new RealSecretsHydrator(secretPersistence); + final SecretsRepositoryReader secretsRepositoryReader = + spy(new SecretsRepositoryReader(configRepository, realSecretsHydrator)); + + final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); + + final String jsonSecretPayload = MockData.MOCK_SERVICE_ACCOUNT_1; + + final SecretCoordinate secretCoordinateJson = new SecretCoordinate( + "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_6b894c2b-71dc-4481-bd9f-572402643cf9", 1); + + doReturn(new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withJsonCredential(Jsons.jsonNode( + Map.of("_secret", secretCoordinateJson.getFullCoordinate()))) + 
.withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636")) + .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); + + doReturn(Optional.of(jsonSecretPayload)).when(secretPersistence).read(secretCoordinateJson); + + final WorkspaceServiceAccount actual = secretsRepositoryReader.getWorkspaceServiceAccountWithSecrets(workspaceId); + final WorkspaceServiceAccount expected = new WorkspaceServiceAccount().withWorkspaceId(workspaceId) + .withJsonCredential(Jsons.deserialize(jsonSecretPayload)) + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com"); + assertEquals(expected, actual); + } + } diff --git a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java index 5cac50b2552e..0759f38951e2 100644 --- a/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java +++ b/airbyte-config/persistence/src/test/java/io/airbyte/config/persistence/SecretsRepositoryWriterTest.java @@ -4,14 +4,22 @@ package io.airbyte.config.persistence; +import static io.airbyte.config.persistence.MockData.HMAC_SECRET_PAYLOAD_1; +import static io.airbyte.config.persistence.MockData.HMAC_SECRET_PAYLOAD_2; +import static io.airbyte.config.persistence.MockData.MOCK_SERVICE_ACCOUNT_1; +import static io.airbyte.config.persistence.MockData.MOCK_SERVICE_ACCOUNT_2; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -24,9 +32,11 @@ import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.config.persistence.split_secrets.MemorySecretPersistence; import io.airbyte.config.persistence.split_secrets.RealSecretsHydrator; import io.airbyte.config.persistence.split_secrets.SecretCoordinate; +import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -212,4 +222,160 @@ private static DestinationConnection injectCoordinateIntoDestination(final Strin return Jsons.clone(DESTINATION_WITH_FULL_CONFIG).withConfiguration(injectCoordinate(coordinate)); } + @Test + public void testWriteWorkspaceServiceAccount() throws JsonValidationException, ConfigNotFoundException, IOException { + final UUID workspaceId = UUID.randomUUID(); + + final String jsonSecretPayload = MOCK_SERVICE_ACCOUNT_1; + final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount() + .withWorkspaceId(workspaceId) + 
.withHmacKey(HMAC_SECRET_PAYLOAD_1) + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") + .withJsonCredential(Jsons.deserialize(jsonSecretPayload)); + + doThrow(new ConfigNotFoundException(ConfigSchema.WORKSPACE_SERVICE_ACCOUNT, workspaceId.toString())) + .when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); + secretsRepositoryWriter.writeServiceAccountJsonCredentials(workspaceServiceAccount); + + assertEquals(2, longLivedSecretPersistence.getMap().size()); + + String jsonPayloadInPersistence = null; + String hmacPayloadInPersistence = null; + + SecretCoordinate jsonSecretCoordinate = null; + SecretCoordinate hmacSecretCoordinate = null; + for (Map.Entry entry : longLivedSecretPersistence.getMap().entrySet()) { + if (entry.getKey().getFullCoordinate().contains("json")) { + jsonSecretCoordinate = entry.getKey(); + jsonPayloadInPersistence = entry.getValue(); + } else if (entry.getKey().getFullCoordinate().contains("hmac")) { + hmacSecretCoordinate = entry.getKey(); + hmacPayloadInPersistence = entry.getValue(); + } else { + throw new RuntimeException(""); + } + } + + assertNotNull(jsonPayloadInPersistence); + assertNotNull(hmacPayloadInPersistence); + assertNotNull(jsonSecretCoordinate); + assertNotNull(hmacSecretCoordinate); + + assertEquals(jsonSecretPayload, jsonPayloadInPersistence); + assertEquals(HMAC_SECRET_PAYLOAD_1.toString(), hmacPayloadInPersistence); + + verify(configRepository).writeWorkspaceServiceAccountNoSecrets( + Jsons.clone(workspaceServiceAccount.withJsonCredential(Jsons.jsonNode(Map.of("_secret", jsonSecretCoordinate.getFullCoordinate()))) + .withHmacKey(Jsons.jsonNode(Map.of("_secret", hmacSecretCoordinate.getFullCoordinate()))))); + } + + @Test + public void testWriteSameStagingConfiguration() throws JsonValidationException, ConfigNotFoundException, IOException { + final ConfigRepository configRepository = mock(ConfigRepository.class); + final SecretPersistence secretPersistence = mock(SecretPersistence.class); + final SecretsRepositoryWriter secretsRepositoryWriter = spy( + new SecretsRepositoryWriter(configRepository, Optional.of(secretPersistence), Optional.of(secretPersistence))); + + final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf"); + + final String jsonSecretPayload = MOCK_SERVICE_ACCOUNT_1; + final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount().withWorkspaceId(workspaceId).withHmacKey( + HMAC_SECRET_PAYLOAD_1) + .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636") + .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com") + .withJsonCredential(Jsons.deserialize(jsonSecretPayload)); + + final SecretCoordinate jsonSecretCoordinate = new SecretCoordinate( + "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); + + final SecretCoordinate hmacSecretCoordinate = new SecretCoordinate( + "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1); + + final WorkspaceServiceAccount cloned = Jsons.clone(workspaceServiceAccount) + .withJsonCredential(Jsons.jsonNode(Map.of("_secret", jsonSecretCoordinate.getFullCoordinate()))) + .withHmacKey(Jsons.jsonNode(Map.of("_secret", hmacSecretCoordinate.getFullCoordinate()))); + + doReturn(cloned).when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId); + + 
doReturn(Optional.of(jsonSecretPayload)).when(secretPersistence).read(jsonSecretCoordinate);
+ doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(hmacSecretCoordinate);
+ secretsRepositoryWriter.writeServiceAccountJsonCredentials(workspaceServiceAccount);
+
+ ArgumentCaptor<SecretCoordinate> coordinates = ArgumentCaptor.forClass(SecretCoordinate.class);
+ ArgumentCaptor<String> payloads = ArgumentCaptor.forClass(String.class);
+
+ verify(secretPersistence, times(2)).write(coordinates.capture(), payloads.capture());
+ List<SecretCoordinate> actualCoordinates = coordinates.getAllValues();
+ assertEquals(2, actualCoordinates.size());
+ assertThat(actualCoordinates, containsInAnyOrder(jsonSecretCoordinate, hmacSecretCoordinate));
+
+ List<String> actualPayload = payloads.getAllValues();
+ assertEquals(2, actualPayload.size());
+ assertThat(actualPayload, containsInAnyOrder(jsonSecretPayload, HMAC_SECRET_PAYLOAD_1.toString()));
+
+ verify(secretPersistence).write(hmacSecretCoordinate, HMAC_SECRET_PAYLOAD_1.toString());
+ verify(configRepository).writeWorkspaceServiceAccountNoSecrets(
+ cloned);
+ }
+
+ @Test
+ public void testWriteDifferentStagingConfiguration() throws JsonValidationException, ConfigNotFoundException, IOException {
+ final ConfigRepository configRepository = mock(ConfigRepository.class);
+ final SecretPersistence secretPersistence = mock(SecretPersistence.class);
+ final SecretsRepositoryWriter secretsRepositoryWriter =
+ spy(new SecretsRepositoryWriter(configRepository, Optional.of(secretPersistence), Optional.of(secretPersistence)));
+
+ final UUID workspaceId = UUID.fromString("13fb9a84-6bfa-4801-8f5e-ce717677babf");
+
+ final String jsonSecretOldPayload = MOCK_SERVICE_ACCOUNT_1;
+ final String jsonSecretNewPayload = MOCK_SERVICE_ACCOUNT_2;
+
+ final WorkspaceServiceAccount workspaceServiceAccount = new WorkspaceServiceAccount()
+ .withWorkspaceId(workspaceId)
+ .withHmacKey(HMAC_SECRET_PAYLOAD_2)
+ .withServiceAccountId("a1e5ac98-7531-48e1-943b-b46636")
+ .withServiceAccountEmail("a1e5ac98-7531-48e1-943b-b46636@random-gcp-project.abc.abcdefghijklmno.com")
+ .withJsonCredential(Jsons.deserialize(jsonSecretNewPayload));
+
+ final SecretCoordinate jsonSecretOldCoordinate = new SecretCoordinate(
+ "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1);
+
+ final SecretCoordinate hmacSecretOldCoordinate = new SecretCoordinate(
+ "service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 1);
+
+ final WorkspaceServiceAccount cloned = Jsons.clone(workspaceServiceAccount)
+ .withJsonCredential(Jsons.jsonNode(Map.of("_secret", jsonSecretOldCoordinate.getFullCoordinate())))
+ .withHmacKey(Jsons.jsonNode(Map.of("_secret", hmacSecretOldCoordinate.getFullCoordinate())));
+
+ doReturn(cloned).when(configRepository).getWorkspaceServiceAccountNoSecrets(workspaceId);
+
+ doReturn(Optional.of(HMAC_SECRET_PAYLOAD_1.toString())).when(secretPersistence).read(hmacSecretOldCoordinate);
+ doReturn(Optional.of(jsonSecretOldPayload)).when(secretPersistence).read(jsonSecretOldCoordinate);
+
+ secretsRepositoryWriter.writeServiceAccountJsonCredentials(workspaceServiceAccount);
+
+ final SecretCoordinate jsonSecretNewCoordinate = new SecretCoordinate(
+ "service_account_json_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 2);
+
+ final SecretCoordinate hmacSecretNewCoordinate = new SecretCoordinate(
+ 
"service_account_hmac_13fb9a84-6bfa-4801-8f5e-ce717677babf_secret_e86e2eab-af9b-42a3-b074-b923b4fa617e", 2); + + ArgumentCaptor coordinates = ArgumentCaptor.forClass(SecretCoordinate.class); + ArgumentCaptor payloads = ArgumentCaptor.forClass(String.class); + + verify(secretPersistence, times(2)).write(coordinates.capture(), payloads.capture()); + List actualCoordinates = coordinates.getAllValues(); + assertEquals(2, actualCoordinates.size()); + assertThat(actualCoordinates, containsInAnyOrder(jsonSecretNewCoordinate, hmacSecretNewCoordinate)); + + List actualPayload = payloads.getAllValues(); + assertEquals(2, actualPayload.size()); + assertThat(actualPayload, containsInAnyOrder(jsonSecretNewPayload, HMAC_SECRET_PAYLOAD_2.toString())); + + verify(configRepository).writeWorkspaceServiceAccountNoSecrets(Jsons.clone(workspaceServiceAccount).withJsonCredential(Jsons.jsonNode( + Map.of("_secret", jsonSecretNewCoordinate.getFullCoordinate()))).withHmacKey(Jsons.jsonNode( + Map.of("_secret", hmacSecretNewCoordinate.getFullCoordinate())))); + } + } diff --git a/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java new file mode 100644 index 000000000000..e948eb00b98e --- /dev/null +++ b/airbyte-db/lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_35_65_001__CreateWorkspaceServiceAccountTable.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.jooq.impl.DSL.currentOffsetDateTime; +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; + +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.JSONB; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_35_65_001__CreateWorkspaceServiceAccountTable extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_35_65_001__CreateWorkspaceServiceAccountTable.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
+ final DSLContext ctx = DSL.using(context.getConnection());
+ createAndPopulateWorkspace(ctx);
+ }
+
+ private static void createAndPopulateWorkspace(final DSLContext ctx) {
+ final Field<UUID> workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false));
+ final Field<String> serviceAccountId = DSL.field("service_account_id", SQLDataType.VARCHAR(31).nullable(false));
+ final Field<String> serviceAccountEmail = DSL.field("service_account_email", SQLDataType.VARCHAR(256).nullable(false));
+ final Field<JSONB> jsonCredential = DSL.field("json_credential", SQLDataType.JSONB.nullable(false));
+ final Field<JSONB> hmacKey = DSL.field("hmac_key", SQLDataType.JSONB.nullable(false));
+ final Field<OffsetDateTime> createdAt =
+ DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime()));
+ final Field<OffsetDateTime> updatedAt =
+ DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime()));
+
+ ctx.createTableIfNotExists("workspace_service_account")
+ .columns(workspaceId,
+ serviceAccountId,
+ serviceAccountEmail,
+ jsonCredential,
+ hmacKey,
+ createdAt,
+ updatedAt)
+ .constraints(primaryKey(workspaceId, serviceAccountId),
+ foreignKey(workspaceId).references("workspace", "id").onDeleteCascade())
+ .execute();
+ LOGGER.info("workspace_service_account table created");
+ }
+
+}
diff --git a/airbyte-db/lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/lib/src/main/resources/configs_database/schema_dump.txt
index c941c10a7bfa..c3f993122cc6 100644
--- a/airbyte-db/lib/src/main/resources/configs_database/schema_dump.txt
+++ b/airbyte-db/lib/src/main/resources/configs_database/schema_dump.txt
@@ -162,6 +162,20 @@ create table "public"."workspace"(
   constraint "workspace_pkey"
     primary key ("id")
 );
+create table "public"."workspace_service_account"(
+  "workspace_id" uuid not null,
+  "service_account_id" varchar(31) not null,
+  "service_account_email" varchar(256) not null,
+  "json_credential" jsonb not null,
+  "hmac_key" jsonb not null,
+  "created_at" timestamptz(35) not null default null,
+  "updated_at" timestamptz(35) not null default null,
+  constraint "workspace_service_account_pkey"
+    primary key (
+      "workspace_id",
+      "service_account_id"
+    )
+);
 alter table "public"."actor"
   add constraint "actor_actor_definition_id_fkey"
     foreign key ("actor_definition_id")
@@ -218,6 +232,10 @@ alter table "public"."state"
   add constraint "state_connection_id_fkey"
     foreign key ("connection_id")
     references "public"."connection" ("id");
+alter table "public"."workspace_service_account"
+  add constraint "workspace_service_account_workspace_id_fkey"
+    foreign key ("workspace_id")
+    references "public"."workspace" ("id");
 create index "actor_actor_definition_id_idx" on "public"."actor"("actor_definition_id" asc);
 create unique index "actor_pkey" on "public"."actor"("id" asc);
 create index "actor_workspace_id_idx" on "public"."actor"("workspace_id" asc);
@@ -253,3 +271,7 @@ create unique index "state_pkey" on "public"."state"(
   "connection_id" asc
 );
 create unique index "workspace_pkey" on "public"."workspace"("id" asc);
+create unique index "workspace_service_account_pkey" on "public"."workspace_service_account"(
+  "workspace_id" asc,
+  "service_account_id" asc
+);

From c19a67103c0882ba329228ee2d1f922948945c72 Mon Sep 17 00:00:00 2001
From: Tim Roes
Date: Tue, 26 Apr 2022 16:34:46 +0200
Subject: [PATCH 05/39] Fix broken release stage labels/warnings (#12326)

---
 airbyte-webapp/src/locales/en.json | 2 +-
 
.../views/Connector/ServiceForm/components/WarningMessage.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index ac602b24d50c..2c1378b5b4ad 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -469,7 +469,7 @@ "connector.releaseStage.generally_available": "GA", "connector.releaseStage.alpha.description": "Alpha connectors are in development and support is not provided.", "connector.releaseStage.beta.description": "Beta connectors are in development but stable and reliable and support is provided.", - "component.releaseStage.generally_available.description": "Generally Available (GA) connectors have been deemed ready for use in a production environment and is officially supported by Airbyte. Their documentation is considered sufficient to support widespread adoption.", + "connector.releaseStage.generally_available.description": "Generally Available (GA) connectors have been deemed ready for use in a production environment and is officially supported by Airbyte. Their documentation is considered sufficient to support widespread adoption.", "connector.connectorsInDevelopment.docLink": "See our documentation for more details.", "credits.credits": "Credits", "credits.whatAreCredits": "What are credits?", diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/WarningMessage.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/WarningMessage.tsx index 9d79737cf1ce..9e047efe6fff 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/WarningMessage.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/WarningMessage.tsx @@ -33,7 +33,7 @@ const WarningMessage: React.FC = ({ stage }) => { {" "} ( From bcf4f89ae83d6a93237b7e77967676e192f2214d Mon Sep 17 00:00:00 2001 From: Greg Solovyev Date: Tue, 26 Apr 2022 08:50:50 -0700 Subject: [PATCH 06/39] Manually bump destination-oracle version (#12292) --- .../init/src/main/resources/seed/destination_definitions.yaml | 2 +- .../init/src/main/resources/seed/destination_specs.yaml | 2 +- airbyte-integrations/connectors/destination-oracle/Dockerfile | 2 +- docs/integrations/destinations/oracle.md | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 402691d0208f..3d1fe3447ffe 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -161,7 +161,7 @@ - name: Oracle destinationDefinitionId: 3986776d-2319-4de9-8af8-db14c0996e72 dockerRepository: airbyte/destination-oracle - dockerImageTag: 0.1.15 + dockerImageTag: 0.1.16 documentationUrl: https://docs.airbyte.io/integrations/destinations/oracle icon: oracle.svg - name: Postgres diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index a8c7019d94d2..1f7bd3ceb0a3 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -2819,7 +2819,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-oracle:0.1.15" +- dockerImage: "airbyte/destination-oracle:0.1.16" spec: documentationUrl: 
"https://docs.airbyte.io/integrations/destinations/oracle" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-oracle/Dockerfile b/airbyte-integrations/connectors/destination-oracle/Dockerfile index 4bf980d8c5c6..8ae09c1d1e30 100644 --- a/airbyte-integrations/connectors/destination-oracle/Dockerfile +++ b/airbyte-integrations/connectors/destination-oracle/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-oracle COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.15 +LABEL io.airbyte.version=0.1.16 LABEL io.airbyte.name=airbyte/destination-oracle diff --git a/docs/integrations/destinations/oracle.md b/docs/integrations/destinations/oracle.md index c2d244980c6f..2878c828e396 100644 --- a/docs/integrations/destinations/oracle.md +++ b/docs/integrations/destinations/oracle.md @@ -92,6 +92,7 @@ Airbite has the ability to connect to the Oracle source with 3 network connectiv | Version | Date | Pull Request | Subject | |:--------| :--- |:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------| +| 0.1.16 | 2022-04-06 | [11514](https://github.com/airbytehq/airbyte/pull/11514) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.1.15 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling and remove DBT support | | 0.1.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | (unpublished) Add `-XX:+ExitOnOutOfMemoryError` JVM option | | 0.1.13 | 2021-12-29 | [\#9177](https://github.com/airbytehq/airbyte/pull/9177) | Update connector fields title/description | From f9ac061979f721338489a253b9edfa8dee4c242e Mon Sep 17 00:00:00 2001 From: Greg Solovyev Date: Tue, 26 Apr 2022 08:51:51 -0700 Subject: [PATCH 07/39] Manually bump source-clickhouse version (#12293) --- .../init/src/main/resources/seed/source_definitions.yaml | 2 +- airbyte-config/init/src/main/resources/seed/source_specs.yaml | 2 +- airbyte-integrations/connectors/source-clickhouse/Dockerfile | 2 +- docs/integrations/sources/clickhouse.md | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 2c68ec2b22e4..1beec7bb3ff6 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -133,7 +133,7 @@ - name: ClickHouse sourceDefinitionId: bad83517-5e54-4a3d-9b53-63e85fbd4d7c dockerRepository: airbyte/source-clickhouse - dockerImageTag: 0.1.8 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/clickhouse icon: cliskhouse.svg sourceType: database diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 4d7efe975a2a..430de5053edd 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -1126,7 +1126,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-clickhouse:0.1.8" +- dockerImage: "airbyte/source-clickhouse:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-clickhouse/Dockerfile 
b/airbyte-integrations/connectors/source-clickhouse/Dockerfile
index 11e99c4730d9..1f9b1623150c 100644
--- a/airbyte-integrations/connectors/source-clickhouse/Dockerfile
+++ b/airbyte-integrations/connectors/source-clickhouse/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION source-clickhouse
 COPY --from=build /airbyte /airbyte
-LABEL io.airbyte.version=0.1.9
+LABEL io.airbyte.version=0.1.10
 LABEL io.airbyte.name=airbyte/source-clickhouse
diff --git a/docs/integrations/sources/clickhouse.md b/docs/integrations/sources/clickhouse.md
index cb6f61369dff..38e2e2295131 100644
--- a/docs/integrations/sources/clickhouse.md
+++ b/docs/integrations/sources/clickhouse.md
@@ -78,6 +78,7 @@ Using this feature requires additional configuration, when creating the source.
 | Version | Date | Pull Request | Subject |
 |:--------| :--- |:---------------------------------------------------------|:-----------------------------------------------------------------|
+| 0.1.10 | 2022-04-12 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 |
 | 0.1.9 | 2022-02-09 | [\#10214](https://github.com/airbytehq/airbyte/pull/10214) | Fix exception in case `password` field is not provided |
 | 0.1.8 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option |
 | 0.1.7 | 2021-12-24 | [\#8958](https://github.com/airbytehq/airbyte/pull/8958) | Add support for JdbcType.ARRAY |

From cce3b21506790f67851ac6be810eebea9187d1cb Mon Sep 17 00:00:00 2001
From: Greg Solovyev
Date: Tue, 26 Apr 2022 08:52:46 -0700
Subject: [PATCH 08/39] Manually bump destination-mssql version (#12291)

---
 .../init/src/main/resources/seed/destination_definitions.yaml | 2 +-
 .../init/src/main/resources/seed/destination_specs.yaml | 2 +-
 airbyte-integrations/connectors/destination-mssql/Dockerfile | 2 +-
 docs/integrations/destinations/mssql.md | 1 +
 4 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
index 3d1fe3447ffe..504c01204a69 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml
@@ -137,7 +137,7 @@
 - name: MS SQL Server
   destinationDefinitionId: d4353156-9217-4cad-8dd7-c108fd4f74cf
   dockerRepository: airbyte/destination-mssql
-  dockerImageTag: 0.1.16
+  dockerImageTag: 0.1.17
   documentationUrl: https://docs.airbyte.io/integrations/destinations/mssql
   icon: mssql.svg
 - name: MeiliSearch
diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
index 1f7bd3ceb0a3..1ec8f09260c3 100644
--- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml
@@ -2267,7 +2267,7 @@
     supportsDBT: false
     supported_destination_sync_modes:
     - "append"
-- dockerImage: "airbyte/destination-mssql:0.1.16"
+- dockerImage: "airbyte/destination-mssql:0.1.17"
  spec:
    documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql"
    connectionSpecification:
diff --git a/airbyte-integrations/connectors/destination-mssql/Dockerfile b/airbyte-integrations/connectors/destination-mssql/Dockerfile
index b746d47dadf2..4363d20bf1c1 100644
--- a/airbyte-integrations/connectors/destination-mssql/Dockerfile
+++ 
b/airbyte-integrations/connectors/destination-mssql/Dockerfile
@@ -16,5 +16,5 @@ ENV APPLICATION destination-mssql
 COPY --from=build /airbyte /airbyte
-LABEL io.airbyte.version=0.1.16
+LABEL io.airbyte.version=0.1.17
 LABEL io.airbyte.name=airbyte/destination-mssql
diff --git a/docs/integrations/destinations/mssql.md b/docs/integrations/destinations/mssql.md
index b85e0b14c6eb..5dd2a1ef4548 100644
--- a/docs/integrations/destinations/mssql.md
+++ b/docs/integrations/destinations/mssql.md
@@ -121,6 +121,7 @@ Using this feature requires additional configuration, when creating the source.
 | Version | Date | Pull Request | Subject |
 |:--------| :--- |:---------------------------------------------------------|:----------------------------------------------------------------------------------------------------|
+| 0.1.17 | 2022-04-05 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 |
 | 0.1.15 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling |
 | 0.1.14 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option |
 | 0.1.13 | 2021-12-28 | [\#9158](https://github.com/airbytehq/airbyte/pull/9158) | Update connector fields title/description |

From 2a1b7b802ad59ad5eb11f704d660b56ae50b4c48 Mon Sep 17 00:00:00 2001
From: Daemonxiao <35677990+Daemonxiao@users.noreply.github.com>
Date: Wed, 27 Apr 2022 00:50:37 +0800
Subject: [PATCH 09/39] Source TiDB: add icon (#12337)

---
 .../init/src/main/resources/icons/tidb.svg | 19 +++++++++++++++++++
 .../resources/seed/source_definitions.yaml | 1 +
 2 files changed, 20 insertions(+)
 create mode 100644 airbyte-config/init/src/main/resources/icons/tidb.svg

diff --git a/airbyte-config/init/src/main/resources/icons/tidb.svg b/airbyte-config/init/src/main/resources/icons/tidb.svg
new file mode 100644
index 000000000000..b1cd7abfd4c4
--- /dev/null
+++ b/airbyte-config/init/src/main/resources/icons/tidb.svg
@@ -0,0 +1,19 @@
+ [19 lines of SVG markup for the TiDB icon]
\ No newline at end of file
diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index 1beec7bb3ff6..e0621790f23f 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -794,6 +794,7 @@
   dockerRepository: airbyte/source-tidb
   dockerImageTag: 0.1.0
   documentationUrl: https://docs.airbyte.io/integrations/sources/tidb
+  icon: tidb.svg
   sourceType: database
 - name: TikTok Marketing
   sourceDefinitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35

From f1c6f1964cacc0c19567101f2a639388de76f0ba Mon Sep 17 00:00:00 2001
From: Prasanna Ram Venkatachalam <47996246+prasrvenkat@users.noreply.github.com>
Date: Tue, 26 Apr 2022 11:19:32 -0700
Subject: [PATCH 10/39] Pod Sweeper: fix parsing missing date (#11781)

---
 kube/resources/pod-sweeper.yaml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/kube/resources/pod-sweeper.yaml b/kube/resources/pod-sweeper.yaml
index dd752ac6ed69..77dd0a2d34d5 100644
--- a/kube/resources/pod-sweeper.yaml
+++ b/kube/resources/pod-sweeper.yaml
@@ -25,6 +25,8 @@ data:
       # Longer time window for pods in error (to debug)
       NON_SUCCESS_DATE_STR=`date -d 'now - 24 hours' --utc -Ins`
       NON_SUCCESS_DATE=`date -d $NON_SUCCESS_DATE_STR +%s`
+      # default time to use in case it's unavailable from kubectl
+      DEFAULT=`date --utc -Ins`
       (
         IFS=$'\n'
         for POD in 
`get_worker_pods`; do @@ -32,7 +34,7 @@ data: POD_NAME=`echo $POD | cut -d " " -f 1` POD_STATUS=`echo $POD | cut -d " " -f 2` POD_DATE_STR=`echo $POD | cut -d " " -f 3` - POD_DATE=`date -d $POD_DATE_STR '+%s'` + POD_DATE=`date -d ${POD_DATE_STR:-$DEFAULT} '+%s'` if [ "$POD_STATUS" = "Succeeded" ]; then if [ "$POD_DATE" -lt "$SUCCESS_DATE" ]; then delete_worker_pod "$POD_NAME" "$POD_STATUS" "$POD_DATE_STR" From 597d6ccbb870e814e4c779215aab0f84d3ad6cbb Mon Sep 17 00:00:00 2001 From: Greg Solovyev Date: Tue, 26 Apr 2022 11:41:59 -0700 Subject: [PATCH 11/39] Reconcile versions in git with versions in docker hub (#12368) --- .../seed/destination_definitions.yaml | 2 +- .../resources/seed/destination_specs.yaml | 2 +- .../resources/seed/source_definitions.yaml | 8 +- .../src/main/resources/seed/source_specs.yaml | 8 +- .../Dockerfile | 2 +- .../destination-clickhouse/Dockerfile | 2 +- .../Dockerfile | 2 +- .../connectors/source-cockroachdb/Dockerfile | 2 +- .../connectors/source-mssql/Dockerfile | 2 +- .../connectors/source-mysql/Dockerfile | 2 +- .../connectors/source-postgres/Dockerfile | 2 +- docs/integrations/destinations/clickhouse.md | 1 + docs/integrations/sources/cockroachdb.md | 24 +++--- docs/integrations/sources/mssql.md | 5 +- docs/integrations/sources/mysql.md | 79 ++++++++++--------- docs/integrations/sources/postgres.md | 1 + 16 files changed, 75 insertions(+), 69 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 504c01204a69..bcf75228f113 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -60,7 +60,7 @@ - name: Clickhouse destinationDefinitionId: ce0d828e-1dc4-496c-b122-2da42e637e48 dockerRepository: airbyte/destination-clickhouse - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/destinations/clickhouse - name: DynamoDB destinationDefinitionId: 8ccd8909-4e99-4141-b48d-4984b70b2d89 diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 1ec8f09260c3..af9b9c4fb3d5 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -793,7 +793,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-clickhouse:0.1.4" +- dockerImage: "airbyte/destination-clickhouse:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/clickhouse" connectionSpecification: diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index e0621790f23f..f87f0b0933a1 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -147,7 +147,7 @@ - name: Cockroachdb sourceDefinitionId: 9fa5862c-da7c-11eb-8d19-0242ac130003 dockerRepository: airbyte/source-cockroachdb - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/cockroachdb icon: cockroachdb.svg sourceType: database @@ -459,7 +459,7 @@ - name: Microsoft SQL Server (MSSQL) sourceDefinitionId: b5ea17b1-f170-46dc-bc31-cc744ca984c1 dockerRepository: 
airbyte/source-mssql - dockerImageTag: 0.3.19 + dockerImageTag: 0.3.21 documentationUrl: https://docs.airbyte.io/integrations/sources/mssql icon: mssql.svg sourceType: database @@ -501,7 +501,7 @@ - name: MySQL sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad dockerRepository: airbyte/source-mysql - dockerImageTag: 0.5.6 + dockerImageTag: 0.5.9 documentationUrl: https://docs.airbyte.io/integrations/sources/mysql icon: mysql.svg sourceType: database @@ -625,7 +625,7 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 0.4.10 + dockerImageTag: 0.4.11 documentationUrl: https://docs.airbyte.io/integrations/sources/postgres icon: postgresql.svg sourceType: database diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 430de5053edd..09588153baf0 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -1304,7 +1304,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-cockroachdb:0.1.10" +- dockerImage: "airbyte/source-cockroachdb:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/cockroachdb" connectionSpecification: @@ -4667,7 +4667,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mssql:0.3.19" +- dockerImage: "airbyte/source-mssql:0.3.21" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" connectionSpecification: @@ -5366,7 +5366,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mysql:0.5.6" +- dockerImage: "airbyte/source-mysql:0.5.9" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/mysql" connectionSpecification: @@ -6476,7 +6476,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-postgres:0.4.10" +- dockerImage: "airbyte/source-postgres:0.4.11" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/postgres" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile index 9fc1aa0f33db..8d3ab609c1f8 100644 --- a/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-clickhouse-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-clickhouse-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.6 LABEL io.airbyte.name=airbyte/destination-clickhouse-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-clickhouse/Dockerfile b/airbyte-integrations/connectors/destination-clickhouse/Dockerfile index dd60f557cc1c..7aa72a71292e 100644 --- a/airbyte-integrations/connectors/destination-clickhouse/Dockerfile +++ b/airbyte-integrations/connectors/destination-clickhouse/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-clickhouse COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/destination-clickhouse diff --git 
a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile index aebdad56b6b7..5e07282f5112 100644 --- a/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-cockroachdb-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.7 +LABEL io.airbyte.version=0.1.8 LABEL io.airbyte.name=airbyte/source-cockroachdb-strict-encrypt diff --git a/airbyte-integrations/connectors/source-cockroachdb/Dockerfile b/airbyte-integrations/connectors/source-cockroachdb/Dockerfile index d0a0ca9cff1e..8a8456ed239d 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/Dockerfile +++ b/airbyte-integrations/connectors/source-cockroachdb/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-cockroachdb COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-cockroachdb diff --git a/airbyte-integrations/connectors/source-mssql/Dockerfile b/airbyte-integrations/connectors/source-mssql/Dockerfile index 3f13b62969d2..f826af5a2e52 100644 --- a/airbyte-integrations/connectors/source-mssql/Dockerfile +++ b/airbyte-integrations/connectors/source-mssql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mssql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.3.19 +LABEL io.airbyte.version=0.3.21 LABEL io.airbyte.name=airbyte/source-mssql diff --git a/airbyte-integrations/connectors/source-mysql/Dockerfile b/airbyte-integrations/connectors/source-mysql/Dockerfile index 4719a21c73fa..952f96f2c0d7 100644 --- a/airbyte-integrations/connectors/source-mysql/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-mysql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.5.6 +LABEL io.airbyte.version=0.5.9 LABEL io.airbyte.name=airbyte/source-mysql diff --git a/airbyte-integrations/connectors/source-postgres/Dockerfile b/airbyte-integrations/connectors/source-postgres/Dockerfile index 448a9aa68c6d..6e5cc0d20377 100644 --- a/airbyte-integrations/connectors/source-postgres/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.4.10 +LABEL io.airbyte.version=0.4.11 LABEL io.airbyte.name=airbyte/source-postgres diff --git a/docs/integrations/destinations/clickhouse.md b/docs/integrations/destinations/clickhouse.md index 7fd35a2f8908..48066e00068b 100644 --- a/docs/integrations/destinations/clickhouse.md +++ b/docs/integrations/destinations/clickhouse.md @@ -79,6 +79,7 @@ Therefore, Airbyte ClickHouse destination will create tables and schemas using t | Version | Date | Pull Request | Subject | |:--------|:-----------| :--- |:---------------------------------------------| +| 0.1.5 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.1.4 | 2022-02-25 | [10421](https://github.com/airbytehq/airbyte/pull/10421) | Refactor JDBC parameters handling | | 0.1.3 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | | 0.1.1 | 2021-12-21 | [\#8982](https://github.com/airbytehq/airbyte/pull/8982) | Set isSchemaRequired to false | diff --git 
a/docs/integrations/sources/cockroachdb.md b/docs/integrations/sources/cockroachdb.md
index cae8045ecc96..d31a5571f4cb 100644
--- a/docs/integrations/sources/cockroachdb.md
+++ b/docs/integrations/sources/cockroachdb.md
@@ -93,22 +93,24 @@ Your database user should now be ready for use with Airbyte.
 ## Changelog

-| Version | Date | Pull Request | Subject |
-| :--- | :--- | :--- | :--- |
-| 0.1.10 | 2022-02-24 | [10235](https://github.com/airbytehq/airbyte/pull/10235) | Fix Replication Failure due to Multiple portal opens |
-| 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats |
-| 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds |
-| 0.1.7 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option |
-| 0.1.6 | 2022-02-08 | [10173](https://github.com/airbytehq/airbyte/pull/10173) | Improved discovering tables in case if user does not have permissions to any table |
-| 0.1.5 | 2021-12-24 | [9004](https://github.com/airbytehq/airbyte/pull/9004) | User can see only permitted tables during discovery |
-| 0.1.4 | 2021-12-24 | [8958](https://github.com/airbytehq/airbyte/pull/8958) | Add support for JdbcType.ARRAY |
-| 0.1.3 | 2021-10-10 | [7819](https://github.com/airbytehq/airbyte/pull/7819) | Fixed Datatype errors during Cockroach DB parsing |
-| 0.1.2 | 2021-08-13 | [4699](https://github.com/airbytehq/airbyte/pull/4699) | Added json config validator |
+| Version | Date | Pull Request | Subject |
+|:--------|:-----------| :--- | :--- |
+| 0.1.11 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 |
+| 0.1.10 | 2022-02-24 | [10235](https://github.com/airbytehq/airbyte/pull/10235) | Fix Replication Failure due to Multiple portal opens |
+| 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats |
+| 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds |
+| 0.1.7 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option |
+| 0.1.6 | 2022-02-08 | [10173](https://github.com/airbytehq/airbyte/pull/10173) | Improved discovering tables in case if user does not have permissions to any table |
+| 0.1.5 | 2021-12-24 | [9004](https://github.com/airbytehq/airbyte/pull/9004) | User can see only permitted tables during discovery |
+| 0.1.4 | 2021-12-24 | [8958](https://github.com/airbytehq/airbyte/pull/8958) | Add support for JdbcType.ARRAY |
+| 0.1.3 | 2021-10-10 | [7819](https://github.com/airbytehq/airbyte/pull/7819) | Fixed Datatype errors during Cockroach DB parsing |
+| 0.1.2 | 2021-08-13 | [4699](https://github.com/airbytehq/airbyte/pull/4699) | Added json config validator |

 ## Changelog source-cockroachdb-strict-encrypt

 | Version | Date | Pull Request | Subject |
 |:--------| :--- | :--- | :--- |
+| 0.1.8 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 |
 | 0.1.6 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. 
Now connectors work with both formats | | 0.1.5 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | | 0.1.4 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option | diff --git a/docs/integrations/sources/mssql.md b/docs/integrations/sources/mssql.md index 6e9de424edc7..69495aec3556 100644 --- a/docs/integrations/sources/mssql.md +++ b/docs/integrations/sources/mssql.md @@ -292,8 +292,9 @@ If you do not see a type in this list, assume that it is coerced into a string. ## Changelog -| Version | Date | Pull Request | Subject | -|:------- | :--------- | :----------------------------------------------------- | :------------------------------------- | +| Version | Date | Pull Request | Subject | +|:--------|:-----------| :----------------------------------------------------- | :------------------------------------- | +| 0.3.21 | 2022-04-11 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | | 0.3.19 | 2022-03-31 | [11495](https://github.com/airbytehq/airbyte/pull/11495) | Adds Support to Chinese MSSQL Server Agent | | 0.3.18 | 2022-03-29 | [11010](https://github.com/airbytehq/airbyte/pull/11010) | Adds JDBC Params | | 0.3.17 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | diff --git a/docs/integrations/sources/mysql.md b/docs/integrations/sources/mysql.md index d197dfd347b0..e9d8298526d0 100644 --- a/docs/integrations/sources/mysql.md +++ b/docs/integrations/sources/mysql.md @@ -183,42 +183,43 @@ If you do not see a type in this list, assume that it is coerced into a string. ## Changelog -| Version | Date | Pull Request | Subject | -|:--------|:-------------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| 0.5.6 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. 
Now connectors work with both formats |
-| 0.5.5 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds |
-| 0.5.4 | 2022-02-11 | [10251](https://github.com/airbytehq/airbyte/issues/10251) | bug Source MySQL CDC: sync failed when has Zero-date value in mandatory column |
-| 0.5.2 | 2021-12-14 | [6425](https://github.com/airbytehq/airbyte/issues/6425) | MySQL CDC sync fails because starting binlog position not found in DB |
-| 0.5.1 | 2021-12-13 | [8582](https://github.com/airbytehq/airbyte/pull/8582) | Update connector fields title/description |
-| 0.5.0 | 2021-12-11 | [7970](https://github.com/airbytehq/airbyte/pull/7970) | Support all MySQL types |
-| 0.4.13 | 2021-12-03 | [8335](https://github.com/airbytehq/airbyte/pull/8335) | Source-MySql: do not check cdc required param binlog_row_image for standard replication |
-| 0.4.12 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key |
-| 0.4.11 | 2021-11-19 | [8047](https://github.com/airbytehq/airbyte/pull/8047) | Source MySQL: transform binary data base64 format |
-| 0.4.10 | 2021-11-15 | [7820](https://github.com/airbytehq/airbyte/pull/7820) | Added basic performance test |
-| 0.4.9 | 2021-11-02 | [7559](https://github.com/airbytehq/airbyte/pull/7559) | Correctly process large unsigned short integer values which may fall outside java's `Short` data type capability |
-| 0.4.8 | 2021-09-16 | [6093](https://github.com/airbytehq/airbyte/pull/6093) | Improve reliability of processing various data types like decimals, dates, datetime, binary, and text |
-| 0.4.7 | 2021-09-30 | [6585](https://github.com/airbytehq/airbyte/pull/6585) | Improved SSH Tunnel key generation steps |
-| 0.4.6 | 2021-09-29 | [6510](https://github.com/airbytehq/airbyte/pull/6510) | Support SSL connection |
-| 0.4.5 | 2021-09-17 | [6146](https://github.com/airbytehq/airbyte/pull/6146) | Added option to connect to DB via SSH |
-| 0.4.1 | 2021-07-23 | [4956](https://github.com/airbytehq/airbyte/pull/4956) | Fix log link |
-| 0.3.7 | 2021-06-09 | [3973](https://github.com/airbytehq/airbyte/pull/3973) | Add AIRBYTE\_ENTRYPOINT for Kubernetes support |
-| 0.3.6 | 2021-06-09 | [3966](https://github.com/airbytehq/airbyte/pull/3966) | Fix excessive logging for CDC method |
-| 0.3.5 | 2021-06-07 | [3890](https://github.com/airbytehq/airbyte/pull/3890) | Fix CDC handle tinyint\(1\) and boolean types |
-| 0.3.4 | 2021-06-04 | [3846](https://github.com/airbytehq/airbyte/pull/3846) | Fix max integer value failure |
-| 0.3.3 | 2021-06-02 | [3789](https://github.com/airbytehq/airbyte/pull/3789) | MySQL CDC poll wait 5 minutes when not received a single record |
-| 0.3.2 | 2021-06-01 | [3757](https://github.com/airbytehq/airbyte/pull/3757) | MySQL CDC poll 5s to 5 min |
-| 0.3.1 | 2021-06-01 | [3505](https://github.com/airbytehq/airbyte/pull/3505) | Implemented MySQL CDC |
-| 0.3.0 | 2021-04-21 | [2990](https://github.com/airbytehq/airbyte/pull/2990) | Support namespaces |
-| 0.2.5 | 2021-04-15 | [2899](https://github.com/airbytehq/airbyte/pull/2899) | Fix bug in tests |
-| 0.2.4 | 2021-03-28 | [2600](https://github.com/airbytehq/airbyte/pull/2600) | Add NCHAR and NVCHAR support to DB and cursor type casting |
-| 0.2.3 | 2021-03-26 | [2611](https://github.com/airbytehq/airbyte/pull/2611) | Add an optional `jdbc_url_params` in parameters |
-| 0.2.2 | 2021-03-26 | [2460](https://github.com/airbytehq/airbyte/pull/2460) | Destination supports destination sync mode 
| -| 0.2.1 | 2021-03-18 | [2488](https://github.com/airbytehq/airbyte/pull/2488) | Sources support primary keys | -| 0.2.0 | 2021-03-09 | [2238](https://github.com/airbytehq/airbyte/pull/2238) | Protocol allows future/unknown properties | -| 0.1.10 | 2021-02-02 | [1887](https://github.com/airbytehq/airbyte/pull/1887) | Migrate AbstractJdbcSource to use iterators | -| 0.1.9 | 2021-01-25 | [1746](https://github.com/airbytehq/airbyte/pull/1746) | Fix NPE in State Decorator | -| 0.1.8 | 2021-01-19 | [1724](https://github.com/airbytehq/airbyte/pull/1724) | Fix JdbcSource handling of tables with same names in different schemas | -| 0.1.7 | 2021-01-14 | [1655](https://github.com/airbytehq/airbyte/pull/1655) | Fix JdbcSource OOM | -| 0.1.6 | 2021-01-08 | [1307](https://github.com/airbytehq/airbyte/pull/1307) | Migrate Postgres and MySQL to use new JdbcSource | -| 0.1.5 | 2020-12-11 | [1267](https://github.com/airbytehq/airbyte/pull/1267) | Support incremental sync | -| 0.1.4 | 2020-11-30 | [1046](https://github.com/airbytehq/airbyte/pull/1046) | Add connectors using an index YAML file | +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| +| 0.5.9 | 2022-04-06 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 | +| 0.5.6 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | +| 0.5.5 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | +| 0.5.4 | 2022-02-11 | [10251](https://github.com/airbytehq/airbyte/issues/10251) | bug Source MySQL CDC: sync failed when has Zero-date value in mandatory column | +| 0.5.2 | 2021-12-14 | [6425](https://github.com/airbytehq/airbyte/issues/6425) | MySQL CDC sync fails because starting binlog position not found in DB | +| 0.5.1 | 2021-12-13 | [8582](https://github.com/airbytehq/airbyte/pull/8582) | Update connector fields title/description | +| 0.5.0 | 2021-12-11 | [7970](https://github.com/airbytehq/airbyte/pull/7970) | Support all MySQL types | +| 0.4.13 | 2021-12-03 | [8335](https://github.com/airbytehq/airbyte/pull/8335) | Source-MySql: do not check cdc required param binlog_row_image for standard replication | +| 0.4.12 | 2021-12-01 | [8371](https://github.com/airbytehq/airbyte/pull/8371) | Fixed incorrect handling "\n" in ssh key | +| 0.4.11 | 2021-11-19 | [8047](https://github.com/airbytehq/airbyte/pull/8047) | Source MySQL: transform binary data base64 format | +| 0.4.10 | 2021-11-15 | [7820](https://github.com/airbytehq/airbyte/pull/7820) | Added basic performance test | +| 0.4.9 | 2021-11-02 | [7559](https://github.com/airbytehq/airbyte/pull/7559) | Correctly process large unsigned short integer values which may fall outside java's `Short` data type capability | +| 0.4.8 | 2021-09-16 | [6093](https://github.com/airbytehq/airbyte/pull/6093) | Improve reliability of processing various data types like decimals, dates, datetime, binary, and text | +| 0.4.7 | 2021-09-30 | [6585](https://github.com/airbytehq/airbyte/pull/6585) | Improved SSH Tunnel key generation steps | +| 0.4.6 | 2021-09-29 | [6510](https://github.com/airbytehq/airbyte/pull/6510) | Support SSL connection | +| 0.4.5 | 2021-09-17 | 
[6146](https://github.com/airbytehq/airbyte/pull/6146) | Added option to connect to DB via SSH |
+| 0.4.1 | 2021-07-23 | [4956](https://github.com/airbytehq/airbyte/pull/4956) | Fix log link |
+| 0.3.7 | 2021-06-09 | [3973](https://github.com/airbytehq/airbyte/pull/3973) | Add AIRBYTE\_ENTRYPOINT for Kubernetes support |
+| 0.3.6 | 2021-06-09 | [3966](https://github.com/airbytehq/airbyte/pull/3966) | Fix excessive logging for CDC method |
+| 0.3.5 | 2021-06-07 | [3890](https://github.com/airbytehq/airbyte/pull/3890) | Fix CDC handle tinyint\(1\) and boolean types |
+| 0.3.4 | 2021-06-04 | [3846](https://github.com/airbytehq/airbyte/pull/3846) | Fix max integer value failure |
+| 0.3.3 | 2021-06-02 | [3789](https://github.com/airbytehq/airbyte/pull/3789) | MySQL CDC poll wait 5 minutes when not received a single record |
+| 0.3.2 | 2021-06-01 | [3757](https://github.com/airbytehq/airbyte/pull/3757) | MySQL CDC poll 5s to 5 min |
+| 0.3.1 | 2021-06-01 | [3505](https://github.com/airbytehq/airbyte/pull/3505) | Implemented MySQL CDC |
+| 0.3.0 | 2021-04-21 | [2990](https://github.com/airbytehq/airbyte/pull/2990) | Support namespaces |
+| 0.2.5 | 2021-04-15 | [2899](https://github.com/airbytehq/airbyte/pull/2899) | Fix bug in tests |
+| 0.2.4 | 2021-03-28 | [2600](https://github.com/airbytehq/airbyte/pull/2600) | Add NCHAR and NVCHAR support to DB and cursor type casting |
+| 0.2.3 | 2021-03-26 | [2611](https://github.com/airbytehq/airbyte/pull/2611) | Add an optional `jdbc_url_params` in parameters |
+| 0.2.2 | 2021-03-26 | [2460](https://github.com/airbytehq/airbyte/pull/2460) | Destination supports destination sync mode |
+| 0.2.1 | 2021-03-18 | [2488](https://github.com/airbytehq/airbyte/pull/2488) | Sources support primary keys |
+| 0.2.0 | 2021-03-09 | [2238](https://github.com/airbytehq/airbyte/pull/2238) | Protocol allows future/unknown properties |
+| 0.1.10 | 2021-02-02 | [1887](https://github.com/airbytehq/airbyte/pull/1887) | Migrate AbstractJdbcSource to use iterators |
+| 0.1.9 | 2021-01-25 | [1746](https://github.com/airbytehq/airbyte/pull/1746) | Fix NPE in State Decorator |
+| 0.1.8 | 2021-01-19 | [1724](https://github.com/airbytehq/airbyte/pull/1724) | Fix JdbcSource handling of tables with same names in different schemas |
+| 0.1.7 | 2021-01-14 | [1655](https://github.com/airbytehq/airbyte/pull/1655) | Fix JdbcSource OOM |
+| 0.1.6 | 2021-01-08 | [1307](https://github.com/airbytehq/airbyte/pull/1307) | Migrate Postgres and MySQL to use new JdbcSource |
+| 0.1.5 | 2020-12-11 | [1267](https://github.com/airbytehq/airbyte/pull/1267) | Support incremental sync |
+| 0.1.4 | 2020-11-30 | [1046](https://github.com/airbytehq/airbyte/pull/1046) | Add connectors using an index YAML file |
diff --git a/docs/integrations/sources/postgres.md b/docs/integrations/sources/postgres.md
index facda1f605f2..6b349289c480 100644
--- a/docs/integrations/sources/postgres.md
+++ b/docs/integrations/sources/postgres.md
@@ -270,6 +270,7 @@ According to Postgres [documentation](https://www.postgresql.org/docs/14/datatyp

 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:-------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------|
+| 0.4.11 | 2022-04-11 | [11729](https://github.com/airbytehq/airbyte/pull/11729) | Bump mina-sshd from 2.7.0 to 2.8.0 |
 | 0.4.10 | 2022-04-08 | [11798](https://github.com/airbytehq/airbyte/pull/11798) | Fixed roles for fetching materialized view 
processing | | 0.4.8 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats | | 0.4.7 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds | From 2a8df3342e3ee0a0eb349fa8998eaae4f0ef8dea Mon Sep 17 00:00:00 2001 From: Amruta Ranade <11484018+Amruta-Ranade@users.noreply.github.com> Date: Tue, 26 Apr 2022 15:53:08 -0400 Subject: [PATCH 12/39] Fixed broken things + updated connector status (#12373) --- .../getting-started-with-airbyte-cloud.md | 6 +-- docs/integrations/README.md | 37 ++++++++++++------- docs/operator-guides/upgrading-airbyte.md | 14 +++---- docs/project-overview/changelog/README.md | 2 +- 4 files changed, 34 insertions(+), 25 deletions(-) diff --git a/docs/cloud/getting-started-with-airbyte-cloud.md b/docs/cloud/getting-started-with-airbyte-cloud.md index e74945e10d78..1a0bd4f0b55a 100644 --- a/docs/cloud/getting-started-with-airbyte-cloud.md +++ b/docs/cloud/getting-started-with-airbyte-cloud.md @@ -8,7 +8,7 @@ To use Airbyte Cloud: 1. If you haven't already, [sign up for Airbyte Cloud](https://cloud.airbyte.io/signup?utm_campaign=22Q1_AirbyteCloudSignUpCampaign_Trial&utm_source=Docs&utm_content=SetupGuide). - Airbyte Cloud offers a 14-day free trial with $1000 worth of [credits](https://docs.airbyte.com/getting-started-with-airbyte-cloud/core-concepts#credits), whichever expires first. For more information, see [Pricing](https://airbyte.com/pricing). + Airbyte Cloud offers a 14-day free trial with $1000 worth of [credits](core-concepts.md#credits), whichever expires first. For more information, see [Pricing](https://airbyte.com/pricing). 2. Airbyte will send you an email with a verification link. On clicking the link, you'll be taken to your new workspace. @@ -98,7 +98,7 @@ Setting up a connection involves configuring the following parameters: -For more information, see [Connections and Sync Modes](https://docs.airbyte.com/understanding-airbyte/connections) and [Namespaces](https://docs.airbyte.com/understanding-airbyte/namespaces) +For more information, see [Connections and Sync Modes](../understanding-airbyte/connections/README.md) and [Namespaces](../understanding-airbyte/namespaces.md) To set up a connection: @@ -149,7 +149,7 @@ To set up a connection: :::tip -To better understand the destination namespace configurations, see [Destination Namespace example](https://docs.airbyte.com/understanding-airbyte/namespaces#examples) +To better understand the destination namespace configurations, see [Destination Namespace example](../understanding-airbyte/namespaces.md#examples) ::: 7. (Optional) In the **Destination Stream Prefix (Optional)** field, add a prefix to stream names (for example, adding a prefix `airbyte_` renames `projects` to `airbyte_projects`). 
diff --git a/docs/integrations/README.md b/docs/integrations/README.md
index 850b6cc89a5d..febc68318980 100644
--- a/docs/integrations/README.md
+++ b/docs/integrations/README.md
@@ -18,8 +18,9 @@ For more information about the grading system, see [Product Release Stages](http
 | :------------------------------------------------------------------------------------------ | :------------------- | :------------------ |
 | [3PL Central](sources/tplcentral.md) | Alpha | No |
 | [Airtable](sources/airtable.md) | Alpha | Yes |
-| [Amazon SQS](sources/amazon-sqs.md) | Alpha | Yes |
+| [Amazon Ads](sources/amazon-ads.md) | Alpha | No |
 | [Amazon Seller Partner](sources/amazon-seller-partner.md) | Alpha | No |
+| [Amazon SQS](sources/amazon-sqs.md) | Alpha | Yes |
 | [Amplitude](sources/amplitude.md) | Alpha | Yes |
 | [Apify Dataset](sources/apify-dataset.md) | Alpha | Yes |
 | [Appstore](sources/appstore.md) | Alpha | No |
 | [Asana](sources/asana.md) | Alpha | No |
 | [AWS CloudTrail](sources/aws-cloudtrail.md) | Alpha | Yes |
 | [Azure Table Storage](sources/azure-table.md) | Alpha | Yes |
 | [BambooHR](sources/bamboo-hr.md) | Alpha | No |
-| [Braintree](sources/braintree.md) | Alpha | Yes |
+| [Baton](sources/hellobaton.md) | Alpha | No |
 | [BigCommerce](sources/bigcommerce.md) | Alpha | Yes |
 | [BigQuery](sources/bigquery.md) | Alpha | Yes |
 | [Bing Ads](sources/bing-ads.md) | Alpha | No |
+| [Braintree](sources/braintree.md) | Alpha | Yes |
 | [Cart.com](sources/cart.md) | Alpha | No |
 | [Chargebee](sources/chargebee.md) | Alpha | Yes |
+| [Chargify](sources/chargify.md) | Alpha | No |
 | [Chartmogul](sources/chartmogul.md) | Alpha | Yes |
 | [ClickHouse](sources/clickhouse.md) | Alpha | Yes |
 | [Close.com](sources/close-com.md) | Alpha | Yes |
 | [CockroachDB](sources/cockroachdb.md) | Alpha | No |
+| [Commercetools](sources/commercetools.md) | Alpha | No |
+| [Confluence](sources/confluence.md) | Alpha | No |
 | [Customer.io](sources/customer-io.md) | Alpha | No |
 | [Db2](sources/db2.md) | Alpha | No |
 | [Delighted](sources/delighted.md) | Alpha | Yes |
 | [Facebook Marketing](sources/facebook-marketing.md) | Generally Available | Yes |
 | [Facebook Pages](sources/facebook-pages.md) | Alpha | No |
 | [Faker](sources/faker.md) | Alpha | No |
-| [Files](sources/file.md) | Alpha | Yes |
+| [File](sources/file.md) | Alpha | Yes |
 | [Flexport](sources/flexport.md) | Alpha | No |
 | [Freshdesk](sources/freshdesk.md) | Alpha | Yes |
+| [Freshsales](sources/freshsales.md) | Alpha | No |
+| [Freshservice](sources/freshservice.md) | Alpha | No |
 | [GitHub](sources/github.md) | Beta | Yes |
 | [GitLab](sources/gitlab.md) | Alpha | Yes |
 | [Google Ads](sources/google-ads.md) | Beta | Yes |
-| [Google Adwords](sources/google-adwords.md) | Alpha | No |
 | [Google Analytics v4](sources/google-analytics-v4.md) | Beta | Yes |
 | [Google Directory](sources/google-directory.md) | Alpha | Yes |
 | [Google Search Console](sources/google-search-console.md) | Alpha | Yes |
 | [Google Workspace Admin Reports](sources/google-workspace-admin-reports.md) | Alpha | Yes |
 | [Greenhouse](sources/greenhouse.md) | Alpha | Yes |
 | [Harness](sources/harness.md) | Alpha | No |
+| [Harvest](sources/harvest.md) | Alpha | No |
+| [http-request](sources/http-request.md) | Alpha | No |
 | 
[HubSpot](sources/hubspot.md) | Beta | Yes | | [Instagram](sources/instagram.md) | Beta | Yes | | [Intercom](sources/intercom.md) | Beta | Yes | | [Iterable](sources/iterable.md) | Alpha | Yes | | [Jenkins](sources/jenkins.md) | Alpha | No | | [Jira](sources/jira.md) | Alpha | No | +| [Kafka](sources/kafka.md) | Alpha | No | | [Klaviyo](sources/klaviyo.md) | Alpha | Yes | | [Kustomer](sources/kustomer.md) | Alpha | Yes | | [Lemlist](sources/lemlist.md) | Alpha | Yes | +| [Lever](sources/level-hiring.md) | Alpha | No | | [LinkedIn Ads](sources/linkedin-ads.md) | Beta | Yes | | [Linnworks](sources/linnworks.md) | Alpha | Yes | -| [Kustomer](sources/kustomer.md) | Alpha | No | -| [Lever Hiring](sources/lever-hiring.md) | Alpha | No | | [Looker](sources/looker.md) | Alpha | Yes | | [Magento](sources/magento.md) | Alpha | No | | [Mailchimp](sources/mailchimp.md) | Alpha | Yes | | [Marketo](sources/marketo.md) | Alpha | Yes | -| [Microsoft SQL Server (MSSQL)](sources/mssql.md) | Alpha | Yes | | [Microsoft Dynamics AX](sources/microsoft-dynamics-ax.md) | Alpha | No | | [Microsoft Dynamics Customer Engagement](sources/microsoft-dynamics-customer-engagement.md) | Alpha | No | | [Microsoft Dynamics GP](sources/microsoft-dynamics-gp.md) | Alpha | No | | [Microsoft Dynamics NAV](sources/microsoft-dynamics-nav.md) | Alpha | No | +| [Microsoft SQL Server (MSSQL)](sources/mssql.md) | Alpha | Yes | | [Microsoft Teams](sources/microsoft-teams.md) | Alpha | Yes | | [Mixpanel](sources/mixpanel.md) | Alpha | Yes | | [Monday](sources/monday.md) | Alpha | Yes | @@ -105,6 +113,7 @@ For more information about the grading system, see [Product Release Stages](http | [PersistIq](sources/persistiq.md) | Alpha | Yes | | [Pinterest](sources/pinterest.md) | Alpha | No | | [Pipedrive](sources/pipedrive.md) | Alpha | No | +| [Pivotal Tracker](sources/pivotal-tracker.md) | Alpha | No | | [Plaid](sources/plaid.md) | Alpha | No | | [PokéAPI](sources/pokeapi.md) | Alpha | Yes | | [Postgres](sources/postgres.md) | Alpha | Yes | @@ -126,16 +135,17 @@ For more information about the grading system, see [Product Release Stages](http | [Shopify](sources/shopify.md) | Alpha | Yes | | [Short.io](sources/shortio.md) | Alpha | Yes | | [Slack](sources/slack.md) | Alpha | No | -| [Spree Commerce](sources/spree-commerce.md) | Alpha | No | | [Smartsheets](sources/smartsheets.md) | Alpha | No | | [Snapchat Marketing](sources/snapchat-marketing.md) | Alpha | Yes | | [Snowflake](sources/snowflake.md) | Alpha | Yes | +| [Spree Commerce](sources/spree-commerce.md) | Alpha | No | | [Square](sources/square.md) | Alpha | Yes | | [Strava](sources/strava.md) | Alpha | No | | [Stripe](sources/stripe.md) | Beta | Yes | | [Sugar CRM](sources/sugar-crm.md) | Alpha | No | | [SurveyMonkey](sources/surveymonkey.md) | Alpha | No | | [Tempo](sources/tempo.md) | Alpha | Yes | +| [TiDB](sources/tidb.md) | Alpha | No | | [TikTok Marketing](./sources/tiktok-marketing.md) | Alpha | No | | [Trello](sources/trello.md) | Alpha | No | | [Twilio](sources/twilio.md) | Alpha | Yes | @@ -151,6 +161,7 @@ For more information about the grading system, see [Product Release Stages](http | [Zendesk Support](sources/zendesk-support.md) | Beta | Yes | | [Zendesk Talk](sources/zendesk-talk.md) | Alpha | No | | [Zenloop](sources/zenloop.md) | Alpha | Yes | +| [Zoho CRM](sources/zoho-crm.md) | Alpha | No | | [Zoom](sources/zoom.md) | Alpha | No | | [Zuora](sources/zuora.md) | Alpha | Yes | @@ -159,20 +170,21 @@ For more information about the grading system, see [Product Release 
Stages](http | Connector | Product Release Stage| Available in Cloud? | | :--------------------------------------------------------- | :------------------- | :------------------ | | [Amazon SQS](destinations/amazon-sqs.md) | Alpha | Yes | +| [Amazon Datalake](destinations/aws-datalake.md) | Alpha | No | | [AzureBlobStorage](destinations/azureblobstorage.md) | Alpha | Yes | | [BigQuery](destinations/bigquery.md) | Generally Available | Yes | | [Cassandra](destinations/cassandra.md) | Alpha | Yes | -| [Chargify \(Keen\)](destinations/chargify.md) | Alpha | Yes | +| [Chargify (Keen)](destinations/chargify.md) | Alpha | Yes | | [ClickHouse](destinations/clickhouse.md) | Alpha | Yes | | [Databricks](destinations/databricks.md) | Alpha | Yes | | [DynamoDB](sources/dynamodb.md) | Alpha | Yes | | [Elasticsearch](destinations/elasticsearch.md) | Alpha | Yes | | [End-to-End Testing](destinations/e2e-test.md) | Alpha | Yes | -| [Google Cloud Storage \(GCS\)](destinations/gcs.md) | Beta | Yes | -| [Google Firestore](destinations/firestore.md) | Alpha | Yes | +| [Google Cloud Storage (GCS)](destinations/gcs.md) | Beta | Yes | | [Google Pubsub](destinations/pubsub.md) | Alpha | Yes | | [Kafka](destinations/kafka.md) | Alpha | No | | [Keen](destinations/keen.md) | Alpha | No | +| [Kinesis](destinations/kinesis.md) | Alpha | No | | [Local CSV](destinations/local-csv.md) | Alpha | No | | [Local JSON](destinations/local-json.md) | Alpha | No | | [MariaDB ColumnStore](destinations/mariadb-columnstore.md) | Alpha | Yes | @@ -192,7 +204,4 @@ For more information about the grading system, see [Product Release Stages](http | [Scylla](destinations/scylla.md) | Alpha | Yes | | [SFTP JSON](destinations/sftp-json.md) | Alpha | Yes | | [Snowflake](destinations/snowflake.md) | Generally Available | Yes | -| [Scylla](destinations/scylla.md) | Alpha | No | -| [Redis](destinations/redis.md) | Alpha | No | -| [Kinesis](destinations/kinesis.md) | Alpha | No | | [Streamr](destinations/streamr.md) | Alpha | No | diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index 820538738a89..e58aa63432b0 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -10,7 +10,7 @@ When Airbyte is upgraded, it will attempt to upgrade some connector versions. It Airbyte intelligently performs upgrades automatically based off of your version defined in your `.env` file and will handle data migration for you. -If you are running [Airbyte on Kubernetes](../deploying-airbyte/on-kubernetes.md), you will need to use one of the two processes defined [here](https://docs.airbyte.io/upgrading-airbyte#upgrading-k-8-s) that differ based on your Airbyte version. +If you are running [Airbyte on Kubernetes](../deploying-airbyte/on-kubernetes.md), you will need to use one of the two processes defined [here](#upgrading-on-k8s-0270-alpha-and-above) that differ based on your Airbyte version. ## Mandatory Intermediate Upgrade @@ -59,9 +59,9 @@ This will completely reset your Airbyte deployment back to scratch and you will ::: -## Upgrading on K8s \(0.27.0-alpha and above\) +## Upgrading on K8s (0.27.0-alpha and above) -If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte version **0.27.0-alpha or above** on Kubernetes : +If you are upgrading from (i.e. your current version of Airbyte is) Airbyte version **0.27.0-alpha or above** on Kubernetes : 1. In a terminal, on the host where Airbyte is running, turn off Airbyte. 
@@ -83,15 +83,15 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve Run `kubectl port-forward svc/airbyte-webapp-svc 8000:80` to allow access to the UI/API. -## Upgrading on K8s \(0.26.4-alpha and below\) +## Upgrading on K8s (0.26.4-alpha and below) -If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte version **before 0.27.0-alpha** on Kubernetes we **do not** support automatic migration. Please follow the following steps to upgrade your Airbyte Kubernetes deployment. +If you are upgrading from (i.e. your current version of Airbyte is) Airbyte version **before 0.27.0-alpha** on Kubernetes we **do not** support automatic migration. Please follow the following steps to upgrade your Airbyte Kubernetes deployment. 1. Switching over to your browser, navigate to the Admin page in the UI. Then go to the Configuration Tab. Click Export. This will download a compressed back-up archive \(gzipped tarball\) of all of your Airbyte configuration data and sync history locally. _Note: Any secrets that you have entered into Airbyte will be in this archive, so you should treat it as a secret._ -2. Back to the terminal, migrate the local archive to the new version using the Migration App \(packaged in a docker container\). +2. Back to the terminal, migrate the local archive to the new version using the Migration App (packaged in a docker container). ```bash docker run --rm -v :/config airbyte/migration: --\ @@ -108,7 +108,7 @@ If you are upgrading from \(i.e. your current version of Airbyte is\) Airbyte ve --output /config/airbyte_archive_migrated.tar.gz ``` -3. Turn off Airbyte fully and **\(see warning\)** delete the existing Airbyte Kubernetes volumes. +3. Turn off Airbyte fully and **(see warning)** delete the existing Airbyte Kubernetes volumes. _WARNING: Make sure you have already exported your data \(step 1\). This command is going to delete your data in Kubernetes, you may lose your airbyte configurations!_ diff --git a/docs/project-overview/changelog/README.md b/docs/project-overview/changelog/README.md index 9e92c8f14f2e..97baf9e0fae7 100644 --- a/docs/project-overview/changelog/README.md +++ b/docs/project-overview/changelog/README.md @@ -172,7 +172,7 @@ Hey Airbyte Community! Let's go over all the changes from v.32.1 and prior! But ⚠️ WARNING ⚠️ Upgrading to v.32.0 is equivalent to a major version bump. If your current version is v.32.0, you must upgrade to v.32.0 first before upgrading to any later version -Keep in mind that this upgrade requires your all of your connector Specs to be retrievable, or Airbyte will fail on startup. You can force delete your connector Specs by setting the `VERSION_0_32_0_FORCE_UPGRADE` environment variable to `true`. Steps to specifically check out v.32.0 and details around this breaking change can be found [here](https://docs.airbyte.io/operator-guides/upgrading-airbyte#mandatory-intermediate-upgrade). +Keep in mind that this upgrade requires your all of your connector Specs to be retrievable, or Airbyte will fail on startup. You can force delete your connector Specs by setting the `VERSION_0_32_0_FORCE_UPGRADE` environment variable to `true`. Steps to specifically check out v.32.0 and details around this breaking change can be found [here](../../operator-guides/upgrading-airbyte#mandatory-intermediate-upgrade). 
*Now back to our regularly scheduled programming.*

From 03a6b6b3bff214109b59cd471b9b9e023eac892c Mon Sep 17 00:00:00 2001
From: Anushree Agrawal
Date: Tue, 26 Apr 2022 15:07:09 -0500
Subject: [PATCH 13/39] =?UTF-8?q?=F0=9F=8E=89=20=20Source=20Orb:=20enrich?=
 =?UTF-8?q?=20credits=20ledger=20entries=20with=20cost=20basis=20data,=20d?=
 =?UTF-8?q?escription,=20and=20update=20expiration=20date=20fields=20(#115?=
 =?UTF-8?q?28)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Add cost basis data to connector and remap block_expiry_date field
* Update dockerfile and orb.md
* Update orb.md and comments
* Add entry_status=committed as filter for getting CreditLedgerEntries
* Update version information
* Move entry_status filter to the correct endpoint
* PR feedback: rename field and add docstring for committed entries
* Fix unit tests to include entry_status param
* Add a unit test to validate transform behavior
* Format using gradlew format
* Bump connector version in spec and definitions
---
 .../resources/seed/source_definitions.yaml    |  2 +-
 .../src/main/resources/seed/source_specs.yaml |  2 +-
 .../connectors/source-orb/Dockerfile          |  2 +-
 .../source-orb/integration_tests/catalog.json | 12 ++++++----
 .../schemas/credits_ledger_entries.json       | 10 +++++++-
 .../source-orb/source_orb/source.py           | 16 ++++++++++++-
 .../unit_tests/test_incremental_streams.py    | 23 ++++++++++++++++++-
 docs/integrations/sources/orb.md              |  2 ++
 8 files changed, 59 insertions(+), 10 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index f87f0b0933a1..0904622573af 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -542,7 +542,7 @@
 - name: Orb
   sourceDefinitionId: 7f0455fb-4518-4ec0-b7a3-d808bf8081cc
   dockerRepository: airbyte/source-orb
-  dockerImageTag: 0.1.1
+  dockerImageTag: 0.1.2
   documentationUrl: https://docs.airbyte.io/integrations/sources/orb
   icon: orb.svg
   sourceType: api
diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
index 09588153baf0..0db0866bfa11 100644
--- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -5971,7 +5971,7 @@
 supportsNormalization: false
 supportsDBT: false
 supported_destination_sync_modes: []
-- dockerImage: "airbyte/source-orb:0.1.1"
+- dockerImage: "airbyte/source-orb:0.1.2"
 spec:
 documentationUrl: "https://docs.withorb.com/"
 connectionSpecification:
diff --git a/airbyte-integrations/connectors/source-orb/Dockerfile b/airbyte-integrations/connectors/source-orb/Dockerfile
index 103bb600576d..b206e03eeec2 100644
--- a/airbyte-integrations/connectors/source-orb/Dockerfile
+++ b/airbyte-integrations/connectors/source-orb/Dockerfile
@@ -34,5 +34,5 @@ COPY source_orb ./source_orb
 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
 ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
-LABEL io.airbyte.version=0.1.1
+LABEL io.airbyte.version=0.1.2
 LABEL io.airbyte.name=airbyte/source-orb
diff --git a/airbyte-integrations/connectors/source-orb/integration_tests/catalog.json b/airbyte-integrations/connectors/source-orb/integration_tests/catalog.json
index fc5b1de6308e..e388a73fb91c 100644
---
a/airbyte-integrations/connectors/source-orb/integration_tests/catalog.json +++ b/airbyte-integrations/connectors/source-orb/integration_tests/catalog.json @@ -111,10 +111,6 @@ "starting_balance": { "type": "number" }, "ending_balance": { "type": "number" }, "amount": { "type": ["null", "number"] }, - "block_expiry_date": { - "type": ["null", "string"], - "format": "date-time" - }, "created_at": { "type": ["null", "string"], "format": "date-time" }, "entry_type": { "type": "string" }, "expiry_date": { @@ -131,6 +127,14 @@ "id": { "type": "string" }, "external_customer_id": { "type": ["null", "string"] } } + }, + "credit_block": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "expiry_date": { "type": ["null", "string"] }, + "per_unit_cost_basis": { "type": ["null", "string"] } + } } } }, diff --git a/airbyte-integrations/connectors/source-orb/source_orb/schemas/credits_ledger_entries.json b/airbyte-integrations/connectors/source-orb/source_orb/schemas/credits_ledger_entries.json index d101a7fae9ba..c657408cb875 100644 --- a/airbyte-integrations/connectors/source-orb/source_orb/schemas/credits_ledger_entries.json +++ b/airbyte-integrations/connectors/source-orb/source_orb/schemas/credits_ledger_entries.json @@ -31,6 +31,12 @@ }, "customer_id": { "type": "string" + }, + "credit_block_per_unit_cost_basis": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] } }, "required": [ @@ -40,6 +46,8 @@ "amount", "created_at", "customer_id", - "entry_type" + "entry_type", + "credit_block_per_unit_cost_basis", + "description" ] } diff --git a/airbyte-integrations/connectors/source-orb/source_orb/source.py b/airbyte-integrations/connectors/source-orb/source_orb/source.py index 09f71fbdf3eb..c4d15ecc8c91 100644 --- a/airbyte-integrations/connectors/source-orb/source_orb/source.py +++ b/airbyte-integrations/connectors/source-orb/source_orb/source.py @@ -235,11 +235,18 @@ def request_params(self, stream_state: Mapping[str, Any], stream_slice: Mapping[ Request params are based on the specific slice (i.e. customer_id) we are requesting for, and so we need to pull out relevant slice state from the stream state. + Ledger entries can either be `pending` or `committed`. + We're filtering to only return `committed` ledger entries, which are entries that are older than the + reporting grace period (12 hours) and are considered finalized. + `pending` entries can change during the reporting grace period, so we don't want to export those entries. + Note that the user of super() here implies that the state for a specific slice of this stream is of the same format as the stream_state of a regular incremental stream. 
""" current_customer_state = stream_state.get(stream_slice["customer_id"], {}) - return super().request_params(current_customer_state, **kwargs) + params = super().request_params(current_customer_state, **kwargs) + params["entry_status"] = "committed" + return params def path(self, stream_slice: Mapping[str, Any] = None, **kwargs): """ @@ -268,6 +275,13 @@ def transform_record(self, ledger_entry_record): del ledger_entry_record["customer"] ledger_entry_record["customer_id"] = nested_customer_id + # Un-nest credit_block -> expiry_date into block_expiry_date and per_unit_cost_basis + nested_expiry_date = ledger_entry_record["credit_block"]["expiry_date"] + nested_per_unit_cost_basis = ledger_entry_record["credit_block"]["per_unit_cost_basis"] + del ledger_entry_record["credit_block"] + ledger_entry_record["block_expiry_date"] = nested_expiry_date + ledger_entry_record["credit_block_per_unit_cost_basis"] = nested_per_unit_cost_basis + return ledger_entry_record def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: diff --git a/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py index fa0a10e3c594..d00dcca8c02f 100644 --- a/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py +++ b/airbyte-integrations/connectors/source-orb/unit_tests/test_incremental_streams.py @@ -160,7 +160,7 @@ def test_credits_ledger_entries_stream_slices(mocker): def test_credits_ledger_entries_request_params(mocker, current_stream_state, current_stream_slice, next_page_token): stream = CreditsLedgerEntries() inputs = {"stream_state": current_stream_state, "stream_slice": current_stream_slice, "next_page_token": next_page_token} - expected_params = dict(limit=CreditsLedgerEntries.page_size) + expected_params = dict(limit=CreditsLedgerEntries.page_size, entry_status="committed") current_slice_state = current_stream_state.get(current_stream_slice["customer_id"], {}) if current_slice_state.get("created_at"): expected_params["created_at[gte]"] = current_slice_state["created_at"] @@ -170,6 +170,27 @@ def test_credits_ledger_entries_request_params(mocker, current_stream_state, cur assert stream.request_params(**inputs) == expected_params +def test_credits_ledger_entries_transform_record(mocker): + stream = CreditsLedgerEntries() + ledger_entry_record = { + "event_id": "foo-event-id", + "entry_type": "decrement", + "customer": { + "id": "foo-customer-id", + }, + "credit_block": {"expiry_date": "2023-01-25T12:00:00+00:00", "per_unit_cost_basis": "2.50"}, + } + + # Validate that calling transform record unwraps nested customer and credit block fields. + assert stream.transform_record(ledger_entry_record) == { + "event_id": "foo-event-id", + "entry_type": "decrement", + "customer_id": "foo-customer-id", + "block_expiry_date": "2023-01-25T12:00:00+00:00", + "credit_block_per_unit_cost_basis": "2.50", + } + + @responses.activate def test_credits_ledger_entries_no_matching_events(mocker): stream = CreditsLedgerEntries(string_event_properties_keys=["ping"]) diff --git a/docs/integrations/sources/orb.md b/docs/integrations/sources/orb.md index 79a1890024a1..44ea9e98fb40 100644 --- a/docs/integrations/sources/orb.md +++ b/docs/integrations/sources/orb.md @@ -51,6 +51,8 @@ an Orb Account and API Key. 
## Changelog

| Version | Date | Pull Request | Subject |
+| --- | --- | --- | --- |
+| 0.1.2 | 2022-04-20 | [11528](https://github.com/airbytehq/airbyte/pull/11528) | Add cost basis to ledger entries, update expiration date, sync only committed entries |
| 0.1.1 | 2022-03-03 | [10839](https://github.com/airbytehq/airbyte/pull/10839) | Support ledger entries with numeric properties + schema fixes |
| 0.1.0 | 2022-02-01 | | New Source: Orb |
| :--- | :--- | :--- | :--- |

From 53e625a511d655763309d421783da8ff37e321e8 Mon Sep 17 00:00:00 2001
From: Greg Solovyev
Date: Tue, 26 Apr 2022 14:37:50 -0700
Subject: [PATCH 14/39] Bump mina-sshd from 2.7.0 to 2.8.0 (#12376)

This is an attempt to merge the main change from https://github.com/airbytehq/airbyte/pull/11514, which now has multiple conflicts. The gist of the change: when creating a Postgres destination connector with SSH tunnel method 'SSH Key Authentication', one is required to provide an RSA key. Creating an rsa-sha2-256 or rsa-sha2-512 key will result in the error SshException: KeyExchange signature verification failed for key type=ssh-rsa if you haven't enabled ssh-rsa in the SSH server's host key algorithms. mina-sshd in version 2.7.0 uses the wrong server key signature algorithm during DH group key exchange (https://issues.apache.org/jira/browse/SSHD-1163). Bumping mina-sshd to version 2.8.0 addresses this issue. Changelog: https://github.com/apache/mina-sshd/blob/master/docs/changes/2.8.0.md
---
 airbyte-integrations/bases/base-java/Dockerfile   | 2 +-
 airbyte-integrations/bases/base-java/build.gradle | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/airbyte-integrations/bases/base-java/Dockerfile b/airbyte-integrations/bases/base-java/Dockerfile
index ea1844de4e64..67a4d3d8764e 100644
--- a/airbyte-integrations/bases/base-java/Dockerfile
+++ b/airbyte-integrations/bases/base-java/Dockerfile
@@ -18,5 +18,5 @@ ENV SENTRY_DSN="https://981e729cf92840628b29121e96e958f7@o1009025.ingest.sentry.
ENV AIRBYTE_ENTRYPOINT "/airbyte/base.sh" ENTRYPOINT ["/airbyte/base.sh"] -LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/integration-base-java diff --git a/airbyte-integrations/bases/base-java/build.gradle b/airbyte-integrations/bases/base-java/build.gradle index a88ccf0f8f9b..c77f75cb38a3 100644 --- a/airbyte-integrations/bases/base-java/build.gradle +++ b/airbyte-integrations/bases/base-java/build.gradle @@ -11,7 +11,7 @@ dependencies { api 'io.sentry:sentry:5.6.0' implementation 'commons-cli:commons-cli:1.4' - implementation 'org.apache.sshd:sshd-mina:2.7.0' + implementation 'org.apache.sshd:sshd-mina:2.8.0' // bouncycastle is pinned to version-match the transitive dependency from kubernetes client-java // because a version conflict causes "parameter object not a ECParameterSpec" on ssh tunnel initiation implementation 'org.bouncycastle:bcprov-jdk15on:1.66' From f816946a09a890c553ee42550d86d6b6ed4b231a Mon Sep 17 00:00:00 2001 From: Ivica Taseski Date: Tue, 26 Apr 2022 23:47:27 +0200 Subject: [PATCH 15/39] Temporal activity heartbeat from incorrect thread fix (#11878) * fix activity context retrieval * add heartbeat unit tests * use cancelation handler for heartbeats * use await for blocking * format code Co-authored-by: Ivica --- .../workers/temporal/CancellationHandler.java | 9 +- .../temporal/TemporalAttemptExecution.java | 7 +- .../workers/temporal/TemporalUtils.java | 17 ++- .../CheckConnectionActivityImpl.java | 12 +- .../catalog/DiscoverCatalogActivityImpl.java | 12 +- .../temporal/spec/SpecActivityImpl.java | 9 +- .../temporal/sync/DbtLauncherWorker.java | 8 +- .../sync/DbtTransformationActivityImpl.java | 61 ++++---- .../workers/temporal/sync/LauncherWorker.java | 9 +- .../sync/NormalizationActivityImpl.java | 23 +-- .../sync/NormalizationLauncherWorker.java | 9 +- .../sync/ReplicationActivityImpl.java | 108 +++++++------- .../sync/ReplicationLauncherWorker.java | 8 +- .../temporal/CancellationHandlerTest.java | 46 ++++++ .../workers/temporal/HeartbeatWorkflow.java | 66 +++++++++ .../workers/temporal/TemporalUtilsTest.java | 135 +++++++++++++++--- 16 files changed, 401 insertions(+), 138 deletions(-) create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/CancellationHandlerTest.java create mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/HeartbeatWorkflow.java diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/CancellationHandler.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/CancellationHandler.java index a68564f868f0..237d8b9fb3e4 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/CancellationHandler.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/CancellationHandler.java @@ -4,7 +4,6 @@ package io.airbyte.workers.temporal; -import io.temporal.activity.Activity; import io.temporal.activity.ActivityExecutionContext; import io.temporal.client.ActivityCompletionException; import org.slf4j.Logger; @@ -18,10 +17,10 @@ class TemporalCancellationHandler implements CancellationHandler { private static final Logger LOGGER = LoggerFactory.getLogger(TemporalCancellationHandler.class); - final ActivityExecutionContext context; + private final ActivityExecutionContext activityContext; - public TemporalCancellationHandler() { - context = Activity.getExecutionContext(); + public TemporalCancellationHandler(ActivityExecutionContext activityContext) { + this.activityContext = activityContext; } /** @@ -48,7 +47,7 @@ public 
void checkAndHandleCancellation(final Runnable onCancellationCallback) { * {@link TemporalUtils#withBackgroundHeartbeat} for where we actually send heartbeats to ensure * that we don't time out the activity. */ - context.heartbeat(null); + activityContext.heartbeat(null); } catch (final ActivityCompletionException e) { onCancellationCallback.run(); LOGGER.warn("Job either timed out or was cancelled."); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalAttemptExecution.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalAttemptExecution.java index 66ffbc598282..45f7c4aa3f5d 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalAttemptExecution.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalAttemptExecution.java @@ -14,6 +14,7 @@ import io.airbyte.workers.Worker; import io.airbyte.workers.WorkerUtils; import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.io.IOException; import java.nio.file.Path; import java.util.concurrent.CompletableFuture; @@ -57,7 +58,8 @@ public TemporalAttemptExecution(final Path workspaceRoot, final Supplier inputSupplier, final CancellationHandler cancellationHandler, final JobPersistence jobPersistence, - final String airbyteVersion) { + final String airbyteVersion, + final Supplier activityContext) { this( workspaceRoot, workerEnvironment, logConfigs, jobRunConfig, @@ -66,7 +68,8 @@ public TemporalAttemptExecution(final Path workspaceRoot, (path -> LogClientSingleton.getInstance().setJobMdc(workerEnvironment, logConfigs, path)), cancellationHandler, jobPersistence, - () -> Activity.getExecutionContext().getInfo().getWorkflowId(), airbyteVersion); + () -> activityContext.get().getInfo().getWorkflowId(), + airbyteVersion); } @VisibleForTesting diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalUtils.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalUtils.java index f6c412b3fdda..695242b7b753 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalUtils.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/TemporalUtils.java @@ -10,7 +10,7 @@ import io.airbyte.config.Configs; import io.airbyte.config.EnvConfigs; import io.airbyte.scheduler.models.JobRunConfig; -import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import io.temporal.api.common.v1.WorkflowExecution; import io.temporal.api.namespace.v1.NamespaceConfig; import io.temporal.api.namespace.v1.NamespaceInfo; @@ -217,13 +217,14 @@ protected static Set getNamespaces(final WorkflowServiceStubs temporalSe * Runs the code within the supplier while heartbeating in the backgroud. Also makes sure to shut * down the heartbeat server after the fact. 
*/ - public static T withBackgroundHeartbeat(final Callable callable) { + public static T withBackgroundHeartbeat(final Callable callable, + final Supplier activityContext) { final ScheduledExecutorService scheduledExecutor = Executors.newSingleThreadScheduledExecutor(); try { - scheduledExecutor.scheduleAtFixedRate(() -> { - Activity.getExecutionContext().heartbeat(null); - }, 0, SEND_HEARTBEAT_INTERVAL.toSeconds(), TimeUnit.SECONDS); + scheduledExecutor.scheduleAtFixedRate( + () -> new CancellationHandler.TemporalCancellationHandler(activityContext.get()).checkAndHandleCancellation(() -> {}), + 0, SEND_HEARTBEAT_INTERVAL.toSeconds(), TimeUnit.SECONDS); return callable.call(); } catch (final ActivityCompletionException e) { @@ -237,12 +238,14 @@ public static T withBackgroundHeartbeat(final Callable callable) { } } - public static T withBackgroundHeartbeat(final AtomicReference cancellationCallbackRef, final Callable callable) { + public static T withBackgroundHeartbeat(final AtomicReference cancellationCallbackRef, + final Callable callable, + final Supplier activityContext) { final ScheduledExecutorService scheduledExecutor = Executors.newSingleThreadScheduledExecutor(); try { scheduledExecutor.scheduleAtFixedRate(() -> { - final CancellationHandler cancellationHandler = new CancellationHandler.TemporalCancellationHandler(); + final CancellationHandler cancellationHandler = new CancellationHandler.TemporalCancellationHandler(activityContext.get()); cancellationHandler.checkAndHandleCancellation(() -> { if (cancellationCallbackRef != null) { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java index 332f20bfe1a6..b9f0be300a78 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java @@ -22,8 +22,9 @@ import io.airbyte.workers.process.ProcessFactory; import io.airbyte.workers.temporal.CancellationHandler; import io.airbyte.workers.temporal.TemporalAttemptExecution; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.nio.file.Path; -import java.util.function.Supplier; public class CheckConnectionActivityImpl implements CheckConnectionActivity { @@ -63,17 +64,18 @@ public StandardCheckConnectionOutput run(final JobRunConfig jobRunConfig, final StandardCheckConnectionInput input = new StandardCheckConnectionInput() .withConnectionConfiguration(fullConfig); - final Supplier inputSupplier = () -> input; + final ActivityExecutionContext context = Activity.getExecutionContext(); final TemporalAttemptExecution temporalAttemptExecution = new TemporalAttemptExecution<>( workspaceRoot, workerEnvironment, logConfigs, jobRunConfig, getWorkerFactory(launcherConfig), - inputSupplier, - new CancellationHandler.TemporalCancellationHandler(), + () -> input, + new CancellationHandler.TemporalCancellationHandler(context), jobPersistence, - airbyteVersion); + airbyteVersion, + () -> context); return temporalAttemptExecution.get(); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java index d55daa711977..36aa90051d27 100644 --- 
a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java @@ -24,8 +24,9 @@ import io.airbyte.workers.protocols.airbyte.DefaultAirbyteStreamFactory; import io.airbyte.workers.temporal.CancellationHandler; import io.airbyte.workers.temporal.TemporalAttemptExecution; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.nio.file.Path; -import java.util.function.Supplier; public class DiscoverCatalogActivityImpl implements DiscoverCatalogActivity { @@ -66,7 +67,7 @@ public AirbyteCatalog run(final JobRunConfig jobRunConfig, final StandardDiscoverCatalogInput input = new StandardDiscoverCatalogInput() .withConnectionConfiguration(fullConfig); - final Supplier inputSupplier = () -> input; + final ActivityExecutionContext context = Activity.getExecutionContext(); final TemporalAttemptExecution temporalAttemptExecution = new TemporalAttemptExecution<>( workspaceRoot, @@ -74,10 +75,11 @@ public AirbyteCatalog run(final JobRunConfig jobRunConfig, logConfigs, jobRunConfig, getWorkerFactory(launcherConfig), - inputSupplier, - new CancellationHandler.TemporalCancellationHandler(), + () -> input, + new CancellationHandler.TemporalCancellationHandler(context), jobPersistence, - airbyteVersion); + airbyteVersion, + () -> context); return temporalAttemptExecution.get(); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java index 7a9c767ea431..728d707afa7a 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java @@ -20,6 +20,8 @@ import io.airbyte.workers.process.ProcessFactory; import io.airbyte.workers.temporal.CancellationHandler; import io.airbyte.workers.temporal.TemporalAttemptExecution; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.nio.file.Path; import java.util.function.Supplier; @@ -52,6 +54,8 @@ public SpecActivityImpl(final WorkerConfigs workerConfigs, public ConnectorSpecification run(final JobRunConfig jobRunConfig, final IntegrationLauncherConfig launcherConfig) { final Supplier inputSupplier = () -> new JobGetSpecConfig().withDockerImage(launcherConfig.getDockerImage()); + final ActivityExecutionContext context = Activity.getExecutionContext(); + final TemporalAttemptExecution temporalAttemptExecution = new TemporalAttemptExecution<>( workspaceRoot, workerEnvironment, @@ -59,9 +63,10 @@ public ConnectorSpecification run(final JobRunConfig jobRunConfig, final Integra jobRunConfig, getWorkerFactory(launcherConfig), inputSupplier, - new CancellationHandler.TemporalCancellationHandler(), + new CancellationHandler.TemporalCancellationHandler(context), jobPersistence, - airbyteVersion); + airbyteVersion, + () -> context); return temporalAttemptExecution.get(); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtLauncherWorker.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtLauncherWorker.java index f70189986579..2a00a66f3f12 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtLauncherWorker.java +++ 
b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtLauncherWorker.java @@ -10,8 +10,10 @@ import io.airbyte.scheduler.models.JobRunConfig; import io.airbyte.workers.WorkerApp; import io.airbyte.workers.WorkerConfigs; +import io.temporal.activity.ActivityExecutionContext; import java.util.Map; import java.util.UUID; +import java.util.function.Supplier; public class DbtLauncherWorker extends LauncherWorker { @@ -23,7 +25,8 @@ public DbtLauncherWorker(final UUID connectionId, final IntegrationLauncherConfig destinationLauncherConfig, final JobRunConfig jobRunConfig, final WorkerConfigs workerConfigs, - final WorkerApp.ContainerOrchestratorConfig containerOrchestratorConfig) { + final WorkerApp.ContainerOrchestratorConfig containerOrchestratorConfig, + final Supplier activityContext) { super( connectionId, DBT, @@ -33,7 +36,8 @@ public DbtLauncherWorker(final UUID connectionId, INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), containerOrchestratorConfig, workerConfigs.getResourceRequirements(), - Void.class); + Void.class, + activityContext); } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java index 89f494ff81b4..3d1b89de3d10 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java @@ -26,6 +26,8 @@ import io.airbyte.workers.temporal.CancellationHandler; import io.airbyte.workers.temporal.TemporalAttemptExecution; import io.airbyte.workers.temporal.TemporalUtils; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.io.IOException; import java.nio.file.Path; import java.util.Optional; @@ -71,34 +73,41 @@ public Void run(final JobRunConfig jobRunConfig, final IntegrationLauncherConfig destinationLauncherConfig, final ResourceRequirements resourceRequirements, final OperatorDbtInput input) { - return TemporalUtils.withBackgroundHeartbeat(() -> { - final var fullDestinationConfig = secretsHydrator.hydrate(input.getDestinationConfiguration()); - final var fullInput = Jsons.clone(input).withDestinationConfiguration(fullDestinationConfig); + final ActivityExecutionContext context = Activity.getExecutionContext(); + return TemporalUtils.withBackgroundHeartbeat( + () -> { + final var fullDestinationConfig = secretsHydrator.hydrate(input.getDestinationConfiguration()); + final var fullInput = Jsons.clone(input).withDestinationConfiguration(fullDestinationConfig); - final Supplier inputSupplier = () -> { - validator.ensureAsRuntime(ConfigSchema.OPERATOR_DBT_INPUT, Jsons.jsonNode(fullInput)); - return fullInput; - }; + final Supplier inputSupplier = () -> { + validator.ensureAsRuntime(ConfigSchema.OPERATOR_DBT_INPUT, Jsons.jsonNode(fullInput)); + return fullInput; + }; - final CheckedSupplier, Exception> workerFactory; + final CheckedSupplier, Exception> workerFactory; - if (containerOrchestratorConfig.isPresent()) { - workerFactory = getContainerLauncherWorkerFactory(workerConfigs, destinationLauncherConfig, jobRunConfig); - } else { - workerFactory = getLegacyWorkerFactory(destinationLauncherConfig, jobRunConfig, resourceRequirements); - } + if (containerOrchestratorConfig.isPresent()) { + workerFactory = + getContainerLauncherWorkerFactory(workerConfigs, destinationLauncherConfig, 
jobRunConfig, + () -> context); + } else { + workerFactory = getLegacyWorkerFactory(destinationLauncherConfig, jobRunConfig, resourceRequirements); + } - final TemporalAttemptExecution temporalAttemptExecution = new TemporalAttemptExecution<>( - workspaceRoot, workerEnvironment, logConfigs, - jobRunConfig, - workerFactory, - inputSupplier, - new CancellationHandler.TemporalCancellationHandler(), - jobPersistence, - airbyteVersion); + final TemporalAttemptExecution temporalAttemptExecution = + new TemporalAttemptExecution<>( + workspaceRoot, workerEnvironment, logConfigs, + jobRunConfig, + workerFactory, + inputSupplier, + new CancellationHandler.TemporalCancellationHandler(context), + jobPersistence, + airbyteVersion, + () -> context); - return temporalAttemptExecution.get(); - }); + return temporalAttemptExecution.get(); + }, + () -> context); } private CheckedSupplier, Exception> getLegacyWorkerFactory(final IntegrationLauncherConfig destinationLauncherConfig, @@ -120,7 +129,8 @@ private CheckedSupplier, Exception> getLegacyWork private CheckedSupplier, Exception> getContainerLauncherWorkerFactory( final WorkerConfigs workerConfigs, final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig) + final JobRunConfig jobRunConfig, + final Supplier activityContext) throws IOException { final var jobScope = jobPersistence.getJob(Long.parseLong(jobRunConfig.getJobId())).getScope(); final var connectionId = UUID.fromString(jobScope); @@ -130,7 +140,8 @@ private CheckedSupplier, Exception> getContainerL destinationLauncherConfig, jobRunConfig, workerConfigs, - containerOrchestratorConfig.get()); + containerOrchestratorConfig.get(), + activityContext); } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/LauncherWorker.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/LauncherWorker.java index e35dd5b40c00..fbeb3eed5fa6 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/LauncherWorker.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/LauncherWorker.java @@ -21,6 +21,7 @@ import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.client.KubernetesClientException; +import io.temporal.activity.ActivityExecutionContext; import java.nio.file.Path; import java.time.Duration; import java.util.HashMap; @@ -30,6 +31,7 @@ import java.util.concurrent.CancellationException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -54,6 +56,7 @@ public class LauncherWorker implements Worker { private final WorkerApp.ContainerOrchestratorConfig containerOrchestratorConfig; private final ResourceRequirements resourceRequirements; private final Class outputClass; + private final Supplier activityContext; private final AtomicBoolean cancelled = new AtomicBoolean(false); private AsyncOrchestratorPodProcess process; @@ -65,7 +68,8 @@ public LauncherWorker(final UUID connectionId, final Map additionalFileMap, final WorkerApp.ContainerOrchestratorConfig containerOrchestratorConfig, final ResourceRequirements resourceRequirements, - final Class outputClass) { + final Class outputClass, + final Supplier activityContext) { this.connectionId = connectionId; this.application = application; this.podNamePrefix = podNamePrefix; @@ -74,6 +78,7 @@ public LauncherWorker(final 
UUID connectionId, this.containerOrchestratorConfig = containerOrchestratorConfig; this.resourceRequirements = resourceRequirements; this.outputClass = outputClass; + this.activityContext = activityContext; } @Override @@ -172,7 +177,7 @@ public OUTPUT run(final INPUT input, final Path jobRoot) throws WorkerException throw new WorkerException("Running the launcher " + application + " failed", e); } } - }); + }, activityContext); } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java index e2475a34fda7..9446a805aa80 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java @@ -25,6 +25,8 @@ import io.airbyte.workers.temporal.CancellationHandler; import io.airbyte.workers.temporal.TemporalAttemptExecution; import io.airbyte.workers.temporal.TemporalUtils; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.io.IOException; import java.nio.file.Path; import java.util.Optional; @@ -69,6 +71,7 @@ public NormalizationActivityImpl(final Optional { final var fullDestinationConfig = secretsHydrator.hydrate(input.getDestinationConfiguration()); final var fullInput = Jsons.clone(input).withDestinationConfiguration(fullDestinationConfig); @@ -81,7 +84,8 @@ public NormalizationSummary normalize(final JobRunConfig jobRunConfig, final CheckedSupplier, Exception> workerFactory; if (containerOrchestratorConfig.isPresent()) { - workerFactory = getContainerLauncherWorkerFactory(workerConfigs, destinationLauncherConfig, jobRunConfig); + workerFactory = getContainerLauncherWorkerFactory(workerConfigs, destinationLauncherConfig, jobRunConfig, + () -> context); } else { workerFactory = getLegacyWorkerFactory(workerConfigs, destinationLauncherConfig, jobRunConfig); } @@ -91,13 +95,14 @@ public NormalizationSummary normalize(final JobRunConfig jobRunConfig, jobRunConfig, workerFactory, inputSupplier, - new CancellationHandler.TemporalCancellationHandler(), + new CancellationHandler.TemporalCancellationHandler(context), jobPersistence, - airbyteVersion); + airbyteVersion, + () -> context); - final NormalizationSummary normalizationSummary = temporalAttemptExecution.get(); - return normalizationSummary; - }); + return temporalAttemptExecution.get(); + }, + () -> context); } private CheckedSupplier, Exception> getLegacyWorkerFactory( @@ -118,7 +123,8 @@ private CheckedSupplier, Except private CheckedSupplier, Exception> getContainerLauncherWorkerFactory( final WorkerConfigs workerConfigs, final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig) + final JobRunConfig jobRunConfig, + final Supplier activityContext) throws IOException { final var jobScope = jobPersistence.getJob(Long.parseLong(jobRunConfig.getJobId())).getScope(); final var connectionId = UUID.fromString(jobScope); @@ -128,7 +134,8 @@ private CheckedSupplier, Except destinationLauncherConfig, jobRunConfig, workerConfigs, - containerOrchestratorConfig.get()); + containerOrchestratorConfig.get(), + activityContext); } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationLauncherWorker.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationLauncherWorker.java index 5dea8dfcb53e..ab07f44ceb65 100644 --- 
a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationLauncherWorker.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationLauncherWorker.java @@ -11,8 +11,10 @@ import io.airbyte.scheduler.models.JobRunConfig; import io.airbyte.workers.WorkerApp; import io.airbyte.workers.WorkerConfigs; +import io.temporal.activity.ActivityExecutionContext; import java.util.Map; import java.util.UUID; +import java.util.function.Supplier; public class NormalizationLauncherWorker extends LauncherWorker { @@ -24,7 +26,8 @@ public NormalizationLauncherWorker(final UUID connectionId, final IntegrationLauncherConfig destinationLauncherConfig, final JobRunConfig jobRunConfig, final WorkerConfigs workerConfigs, - final WorkerApp.ContainerOrchestratorConfig containerOrchestratorConfig) { + final WorkerApp.ContainerOrchestratorConfig containerOrchestratorConfig, + final Supplier activityContext) { super( connectionId, NORMALIZATION, @@ -34,7 +37,9 @@ public NormalizationLauncherWorker(final UUID connectionId, INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), containerOrchestratorConfig, workerConfigs.getResourceRequirements(), - NormalizationSummary.class); + NormalizationSummary.class, + activityContext); + } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java index 656a30c79f77..33f063bb781a 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java @@ -38,6 +38,8 @@ import io.airbyte.workers.temporal.CancellationHandler; import io.airbyte.workers.temporal.TemporalAttemptExecution; import io.airbyte.workers.temporal.TemporalUtils; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import java.io.IOException; import java.nio.file.Path; import java.util.Optional; @@ -103,51 +105,58 @@ public StandardSyncOutput replicate(final JobRunConfig jobRunConfig, final IntegrationLauncherConfig sourceLauncherConfig, final IntegrationLauncherConfig destinationLauncherConfig, final StandardSyncInput syncInput) { - return TemporalUtils.withBackgroundHeartbeat(() -> { - - final var fullSourceConfig = secretsHydrator.hydrate(syncInput.getSourceConfiguration()); - final var fullDestinationConfig = secretsHydrator.hydrate(syncInput.getDestinationConfiguration()); - - final var fullSyncInput = Jsons.clone(syncInput) - .withSourceConfiguration(fullSourceConfig) - .withDestinationConfiguration(fullDestinationConfig); - - final Supplier inputSupplier = () -> { - validator.ensureAsRuntime(ConfigSchema.STANDARD_SYNC_INPUT, Jsons.jsonNode(fullSyncInput)); - return fullSyncInput; - }; - - final CheckedSupplier, Exception> workerFactory; - - if (containerOrchestratorConfig.isPresent()) { - workerFactory = getContainerLauncherWorkerFactory( - containerOrchestratorConfig.get(), - sourceLauncherConfig, - destinationLauncherConfig, - jobRunConfig, - syncInput.getResourceRequirements()); - } else { - workerFactory = getLegacyWorkerFactory(sourceLauncherConfig, destinationLauncherConfig, jobRunConfig, syncInput); - } - - final TemporalAttemptExecution temporalAttempt = new TemporalAttemptExecution<>( - workspaceRoot, - workerEnvironment, - logConfigs, - jobRunConfig, - workerFactory, - inputSupplier, - new 
CancellationHandler.TemporalCancellationHandler(), - jobPersistence, - airbyteVersion); - - final ReplicationOutput attemptOutput = temporalAttempt.get(); - final StandardSyncOutput standardSyncOutput = reduceReplicationOutput(attemptOutput); - - LOGGER.info("sync summary: {}", standardSyncOutput); - - return standardSyncOutput; - }); + final ActivityExecutionContext context = Activity.getExecutionContext(); + return TemporalUtils.withBackgroundHeartbeat( + () -> { + + final var fullSourceConfig = secretsHydrator.hydrate(syncInput.getSourceConfiguration()); + final var fullDestinationConfig = secretsHydrator.hydrate(syncInput.getDestinationConfiguration()); + + final var fullSyncInput = Jsons.clone(syncInput) + .withSourceConfiguration(fullSourceConfig) + .withDestinationConfiguration(fullDestinationConfig); + + final Supplier inputSupplier = () -> { + validator.ensureAsRuntime(ConfigSchema.STANDARD_SYNC_INPUT, Jsons.jsonNode(fullSyncInput)); + return fullSyncInput; + }; + + final CheckedSupplier, Exception> workerFactory; + + if (containerOrchestratorConfig.isPresent()) { + workerFactory = getContainerLauncherWorkerFactory( + containerOrchestratorConfig.get(), + sourceLauncherConfig, + destinationLauncherConfig, + jobRunConfig, + syncInput.getResourceRequirements(), + () -> context); + } else { + workerFactory = + getLegacyWorkerFactory(sourceLauncherConfig, destinationLauncherConfig, jobRunConfig, syncInput); + } + + final TemporalAttemptExecution temporalAttempt = + new TemporalAttemptExecution<>( + workspaceRoot, + workerEnvironment, + logConfigs, + jobRunConfig, + workerFactory, + inputSupplier, + new CancellationHandler.TemporalCancellationHandler(context), + jobPersistence, + airbyteVersion, + () -> context); + + final ReplicationOutput attemptOutput = temporalAttempt.get(); + final StandardSyncOutput standardSyncOutput = reduceReplicationOutput(attemptOutput); + + LOGGER.info("sync summary: {}", standardSyncOutput); + + return standardSyncOutput; + }, + () -> context); } private static StandardSyncOutput reduceReplicationOutput(final ReplicationOutput output) { @@ -205,11 +214,13 @@ private CheckedSupplier, Exception> }; } - private CheckedSupplier, Exception> getContainerLauncherWorkerFactory(final ContainerOrchestratorConfig containerOrchestratorConfig, + private CheckedSupplier, Exception> getContainerLauncherWorkerFactory( + final ContainerOrchestratorConfig containerOrchestratorConfig, final IntegrationLauncherConfig sourceLauncherConfig, final IntegrationLauncherConfig destinationLauncherConfig, final JobRunConfig jobRunConfig, - final ResourceRequirements resourceRequirements) + final ResourceRequirements resourceRequirements, + final Supplier activityContext) throws IOException { final var jobScope = jobPersistence.getJob(Long.parseLong(jobRunConfig.getJobId())).getScope(); @@ -221,7 +232,8 @@ private CheckedSupplier, Exception> sourceLauncherConfig, destinationLauncherConfig, jobRunConfig, - resourceRequirements); + resourceRequirements, + activityContext); } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationLauncherWorker.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationLauncherWorker.java index 6cfb706f70df..84956d2a4ede 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationLauncherWorker.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationLauncherWorker.java @@ -11,8 +11,10 @@ import 
io.airbyte.scheduler.models.IntegrationLauncherConfig; import io.airbyte.scheduler.models.JobRunConfig; import io.airbyte.workers.WorkerApp; +import io.temporal.activity.ActivityExecutionContext; import java.util.Map; import java.util.UUID; +import java.util.function.Supplier; /** * Launches a container-orchestrator container/pod to manage the message passing for the replication @@ -31,7 +33,8 @@ public ReplicationLauncherWorker(final UUID connectionId, final IntegrationLauncherConfig sourceLauncherConfig, final IntegrationLauncherConfig destinationLauncherConfig, final JobRunConfig jobRunConfig, - final ResourceRequirements resourceRequirements) { + final ResourceRequirements resourceRequirements, + final Supplier activityContext) { super( connectionId, REPLICATION, @@ -42,7 +45,8 @@ public ReplicationLauncherWorker(final UUID connectionId, INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), containerOrchestratorConfig, resourceRequirements, - ReplicationOutput.class); + ReplicationOutput.class, + activityContext); } } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/CancellationHandlerTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/CancellationHandlerTest.java new file mode 100644 index 000000000000..fd13db534864 --- /dev/null +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/CancellationHandlerTest.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.temporal; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; + +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; +import io.temporal.client.WorkflowClient; +import io.temporal.client.WorkflowOptions; +import io.temporal.testing.TestWorkflowEnvironment; +import io.temporal.worker.Worker; +import org.junit.jupiter.api.Test; + +class CancellationHandlerTest { + + @Test + void testCancellationHandler() { + + final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); + + final Worker worker = testEnv.newWorker("task-queue"); + + worker.registerWorkflowImplementationTypes(HeartbeatWorkflow.HeartbeatWorkflowImpl.class); + final WorkflowClient client = testEnv.getWorkflowClient(); + + worker.registerActivitiesImplementations(new HeartbeatWorkflow.HeartbeatActivityImpl(() -> { + ActivityExecutionContext context = Activity.getExecutionContext(); + new CancellationHandler.TemporalCancellationHandler(context).checkAndHandleCancellation(() -> {}); + })); + + testEnv.start(); + + final HeartbeatWorkflow heartbeatWorkflow = client.newWorkflowStub( + HeartbeatWorkflow.class, + WorkflowOptions.newBuilder() + .setTaskQueue("task-queue") + .build()); + + assertDoesNotThrow(heartbeatWorkflow::execute); + + } + +} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/HeartbeatWorkflow.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/HeartbeatWorkflow.java new file mode 100644 index 000000000000..8047dd729ef2 --- /dev/null +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/HeartbeatWorkflow.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.workers.temporal; + +import io.temporal.activity.ActivityCancellationType; +import io.temporal.activity.ActivityInterface; +import io.temporal.activity.ActivityMethod; +import io.temporal.activity.ActivityOptions; +import io.temporal.workflow.Workflow; +import io.temporal.workflow.WorkflowInterface; +import io.temporal.workflow.WorkflowMethod; +import java.time.Duration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@WorkflowInterface +public interface HeartbeatWorkflow { + + @WorkflowMethod + void execute(); + + class HeartbeatWorkflowImpl implements HeartbeatWorkflow { + + private final ActivityOptions options = ActivityOptions.newBuilder() + .setScheduleToCloseTimeout(Duration.ofDays(1)) + .setCancellationType(ActivityCancellationType.WAIT_CANCELLATION_COMPLETED) + .setRetryOptions(TemporalUtils.NO_RETRY) + .build(); + + private final HeartbeatActivity heartbeatActivity = Workflow.newActivityStub(HeartbeatActivity.class, options); + + @Override + public void execute() { + heartbeatActivity.heartbeat(); + } + + } + + @ActivityInterface + interface HeartbeatActivity { + + @ActivityMethod + void heartbeat(); + + } + + class HeartbeatActivityImpl implements HeartbeatActivity { + + private static final Logger LOGGER = LoggerFactory.getLogger(HeartbeatActivityImpl.class); + + private final Runnable runnable; + + public HeartbeatActivityImpl(Runnable runnable) { + this.runnable = runnable; + } + + @Override + public void heartbeat() { + runnable.run(); + } + + } + +} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalUtilsTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalUtilsTest.java index 9e35ccbadda7..d1797f9a8859 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalUtilsTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/TemporalUtilsTest.java @@ -5,7 +5,9 @@ package io.airbyte.workers.temporal; import static io.airbyte.workers.temporal.TemporalUtils.getTemporalClientWhenConnected; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -13,7 +15,9 @@ import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.workers.WorkerException; +import io.temporal.activity.Activity; import io.temporal.activity.ActivityCancellationType; +import io.temporal.activity.ActivityExecutionContext; import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; import io.temporal.activity.ActivityOptions; @@ -44,7 +48,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class TemporalUtilsTest { +class TemporalUtilsTest { private static final String TASK_QUEUE = "default"; @@ -91,7 +95,7 @@ void testAsyncExecute() throws Exception { } @Test - public void testWaitForTemporalServerAndLogThrowsException() { + void testWaitForTemporalServerAndLogThrowsException() { final WorkflowServiceStubs workflowServiceStubs = mock(WorkflowServiceStubs.class, Mockito.RETURNS_DEEP_STUBS); final DescribeNamespaceResponse describeNamespaceResponse = mock(DescribeNamespaceResponse.class); final NamespaceInfo namespaceInfo = mock(NamespaceInfo.class); @@ -109,7 +113,7 @@ public void 
testWaitForTemporalServerAndLogThrowsException() { } @Test - public void testWaitThatTimesOut() { + void testWaitThatTimesOut() { final WorkflowServiceStubs workflowServiceStubs = mock(WorkflowServiceStubs.class, Mockito.RETURNS_DEEP_STUBS); final DescribeNamespaceResponse describeNamespaceResponse = mock(DescribeNamespaceResponse.class); final NamespaceInfo namespaceInfo = mock(NamespaceInfo.class); @@ -129,7 +133,7 @@ public void testWaitThatTimesOut() { } @Test - public void testRuntimeExceptionOnHeartbeatWrapper() { + void testRuntimeExceptionOnHeartbeatWrapper() { final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); final Worker worker = testEnv.newWorker(TASK_QUEUE); worker.registerWorkflowImplementationTypes(TestFailingWorkflow.WorkflowImpl.class); @@ -151,7 +155,7 @@ public void testRuntimeExceptionOnHeartbeatWrapper() { } @Test - public void testWorkerExceptionOnHeartbeatWrapper() { + void testWorkerExceptionOnHeartbeatWrapper() { final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); final Worker worker = testEnv.newWorker(TASK_QUEUE); worker.registerWorkflowImplementationTypes(TestFailingWorkflow.WorkflowImpl.class); @@ -164,14 +168,95 @@ public void testWorkerExceptionOnHeartbeatWrapper() { client.newWorkflowStub(TestFailingWorkflow.class, WorkflowOptions.newBuilder().setTaskQueue(TASK_QUEUE).build()); // throws workerexception wrapped in a WorkflowFailedException - assertThrows(WorkflowFailedException.class, () -> { - workflowStub.run("worker"); - }); + assertThrows(WorkflowFailedException.class, () -> workflowStub.run("worker")); // we should never retry enough to reach the end assertEquals(0, timesReachedEnd.get()); } + @Test + void testHeartbeatWithContext() throws InterruptedException { + + final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); + + final Worker worker = testEnv.newWorker(TASK_QUEUE); + + worker.registerWorkflowImplementationTypes(HeartbeatWorkflow.HeartbeatWorkflowImpl.class); + final WorkflowClient client = testEnv.getWorkflowClient(); + + final CountDownLatch latch = new CountDownLatch(2); + + worker.registerActivitiesImplementations(new HeartbeatWorkflow.HeartbeatActivityImpl(() -> { + ActivityExecutionContext context = Activity.getExecutionContext(); + TemporalUtils.withBackgroundHeartbeat( + // TODO (itaseski) figure out how to decrease heartbeat intervals using reflection + () -> { + latch.await(); + return new Object(); + }, + () -> { + latch.countDown(); + return context; + }); + })); + + testEnv.start(); + + final HeartbeatWorkflow heartbeatWorkflow = client.newWorkflowStub( + HeartbeatWorkflow.class, + WorkflowOptions.newBuilder() + .setTaskQueue(TASK_QUEUE) + .build()); + + // use async execution to avoid blocking the test thread + WorkflowClient.start(heartbeatWorkflow::execute); + + assertTrue(latch.await(15, TimeUnit.SECONDS)); + + } + + @Test + void testHeartbeatWithContextAndCallbackRef() throws InterruptedException { + + final TestWorkflowEnvironment testEnv = TestWorkflowEnvironment.newInstance(); + + final Worker worker = testEnv.newWorker(TASK_QUEUE); + + worker.registerWorkflowImplementationTypes(HeartbeatWorkflow.HeartbeatWorkflowImpl.class); + final WorkflowClient client = testEnv.getWorkflowClient(); + + final CountDownLatch latch = new CountDownLatch(2); + + worker.registerActivitiesImplementations(new HeartbeatWorkflow.HeartbeatActivityImpl(() -> { + ActivityExecutionContext context = Activity.getExecutionContext(); + TemporalUtils.withBackgroundHeartbeat( + 
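+              // Sketch of the assumed argument order, read from the two call sites in this file: an
+              // AtomicReference<Runnable> cancellation callback (a no-op here) comes first, then the
+              // heartbeated callable, then the supplier of the ActivityExecutionContext to heartbeat on.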
// TODO (itaseski) figure out how to decrease heartbeat intervals using reflection + new AtomicReference<>(() -> {}), + () -> { + latch.await(); + return new Object(); + }, + () -> { + latch.countDown(); + return context; + }); + })); + + testEnv.start(); + + final HeartbeatWorkflow heartbeatWorkflow = client.newWorkflowStub( + HeartbeatWorkflow.class, + WorkflowOptions.newBuilder() + .setTaskQueue(TASK_QUEUE) + .build()); + + // use async execution to avoid blocking the test thread + WorkflowClient.start(heartbeatWorkflow::execute); + + assertTrue(latch.await(15, TimeUnit.SECONDS)); + + } + @WorkflowInterface public interface TestWorkflow { @@ -291,20 +376,24 @@ public Activity1Impl(final AtomicInteger timesReachedEnd) { public void activity(String arg) { LOGGER.info("before: {}", ACTIVITY1); - TemporalUtils.withBackgroundHeartbeat(new AtomicReference<>(null), () -> { - if (timesReachedEnd.get() == 0) { - if (arg.equals("runtime")) { - throw new RuntimeException("failed"); - } else if (arg.equals("timeout")) { - Thread.sleep(10000); - return null; - } else { - throw new WorkerException("failed"); - } - } else { - return null; - } - }); + ActivityExecutionContext context = Activity.getExecutionContext(); + TemporalUtils.withBackgroundHeartbeat( + new AtomicReference<>(null), + () -> { + if (timesReachedEnd.get() == 0) { + if (arg.equals("runtime")) { + throw new RuntimeException("failed"); + } else if (arg.equals("timeout")) { + Thread.sleep(10000); + return null; + } else { + throw new WorkerException("failed"); + } + } else { + return null; + } + }, + () -> context); timesReachedEnd.incrementAndGet(); LOGGER.info("before: {}", ACTIVITY1); } From 2fae6e3e04d1899388e1b220a52558d03d3a0134 Mon Sep 17 00:00:00 2001 From: Luke Alexander Date: Tue, 26 Apr 2022 23:19:33 +0100 Subject: [PATCH 16/39] helm chart: ensure correct syntax for for kube job tolerations env var (#12313) --- charts/airbyte/templates/_helpers.tpl | 11 +++++++++++ charts/airbyte/templates/env-configmap.yaml | 2 +- charts/airbyte/templates/worker/deployment.yaml | 12 ++++++------ charts/airbyte/values.yaml | 5 +++++ 4 files changed, 23 insertions(+), 7 deletions(-) diff --git a/charts/airbyte/templates/_helpers.tpl b/charts/airbyte/templates/_helpers.tpl index a9db9ae199c7..333a0c16e9a4 100644 --- a/charts/airbyte/templates/_helpers.tpl +++ b/charts/airbyte/templates/_helpers.tpl @@ -238,3 +238,14 @@ Construct comma separated list of key/value pairs from object (useful for ENV va {{- end -}} {{ join "," $kvList }} {{- end -}} + +{{/* +Construct semi-colon delimited list of comma separated key/value pairs from array of objects (useful for ENV var values) +*/}} +{{- define "airbyte.flattenArrayMap" -}} +{{- $mapList := list -}} +{{- range $element := . 
-}} +{{- $mapList = include "airbyte.flattenMap" $element | mustAppend $mapList -}} +{{- end -}} +{{ join ";" $mapList }} +{{- end -}} diff --git a/charts/airbyte/templates/env-configmap.yaml b/charts/airbyte/templates/env-configmap.yaml index e4eeb215a024..831907a90d2e 100644 --- a/charts/airbyte/templates/env-configmap.yaml +++ b/charts/airbyte/templates/env-configmap.yaml @@ -28,7 +28,7 @@ data: JOB_KUBE_NODE_SELECTORS: {{ $.Values.jobs.kube.nodeSelector | include "airbyte.flattenMap" | quote }} {{- end }} {{- if $.Values.jobs.kube.tolerations }} - JOB_KUBE_TOLERATIONS: {{ $.Values.jobs.kube.tolerations | include "airbyte.flattenMap" | quote }} + JOB_KUBE_TOLERATIONS: {{ $.Values.jobs.kube.tolerations | include "airbyte.flattenArrayMap" | quote }} {{- end }} JOB_MAIN_CONTAINER_CPU_LIMIT: {{ ((.Values.jobs.resources | default dict).limits | default dict).cpu | default "" | quote }} JOB_MAIN_CONTAINER_CPU_REQUEST: {{ ((.Values.jobs.resources | default dict).requests | default dict).cpu | default "" | quote }} diff --git a/charts/airbyte/templates/worker/deployment.yaml b/charts/airbyte/templates/worker/deployment.yaml index 655dd506bac0..826f2b6d1066 100644 --- a/charts/airbyte/templates/worker/deployment.yaml +++ b/charts/airbyte/templates/worker/deployment.yaml @@ -112,27 +112,27 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace -{{- if $.Values.jobs.kube.annotations }} + {{- if $.Values.jobs.kube.annotations }} - name: JOB_KUBE_ANNOTATIONS valueFrom: configMapKeyRef: name: airbyte-env key: JOB_KUBE_ANNOTATIONS -{{- end }} -{{- if $.Values.jobs.kube.nodeSelector }} + {{- end }} + {{- if $.Values.jobs.kube.nodeSelector }} - name: JOB_KUBE_NODE_SELECTORS valueFrom: configMapKeyRef: name: airbyte-env key: JOB_KUBE_NODE_SELECTORS -{{- end }} -{{- if $.Values.jobs.kube.tolerations }} + {{- end }} + {{- if $.Values.jobs.kube.tolerations }} - name: JOB_KUBE_TOLERATIONS valueFrom: configMapKeyRef: name: airbyte-env key: JOB_KUBE_TOLERATIONS -{{- end }} + {{- end }} - name: SUBMITTER_NUM_THREADS valueFrom: configMapKeyRef: diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 7d90effb8bcd..4a19a4c4b0a2 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -976,4 +976,9 @@ jobs: ## JOB_KUBE_TOLERATIONS ## @param jobs.kube.tolerations [array] Tolerations for jobs.kube pod assignment. 
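  ## For reference: the airbyte.flattenArrayMap helper above turns each toleration object into a
  ## comma-separated key=value list and joins tolerations with ";", so a single entry should render
  ## to a value along the lines of "key=airbyte-server,operator=Equal,value=true,effect=NoSchedule"
  ## (key order may vary).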
## ref: https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + ## any boolean values should be quoted to ensure the value is passed through as a string, eg: + ## - key: airbyte-server + ## operator: Equal + ## value: "true" + ## effect: NoSchedule tolerations: [] From 68b51916f55295296597d5200a46ea85b53d80f4 Mon Sep 17 00:00:00 2001 From: Ivica Taseski Date: Wed, 27 Apr 2022 01:57:38 +0200 Subject: [PATCH 17/39] =?UTF-8?q?=F0=9F=8E=89=20Source=20Pipedrive:=203=20?= =?UTF-8?q?new=20streams=20=20DealFields,=20OrganizationFields,=20PersonFi?= =?UTF-8?q?elds.=20(#11870)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add support for custom fields * add spacing * remove none primary key * correct test * format code * correct schema * bump connector version * remove enum Co-authored-by: marcosmarxm --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-pipedrive/Dockerfile | 2 +- .../integration_tests/configured_catalog.json | 36 +++++++++ .../source_pipedrive/schemas/deal_fields.json | 79 +++++++++++++++++++ .../schemas/organization_fields.json | 79 +++++++++++++++++++ .../schemas/person_fields.json | 79 +++++++++++++++++++ .../source_pipedrive/source.py | 18 ++++- .../source_pipedrive/spec.json | 4 - .../source_pipedrive/streams.py | 14 +++- docs/integrations/sources/pipedrive.md | 1 + 11 files changed, 306 insertions(+), 10 deletions(-) create mode 100644 airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/deal_fields.json create mode 100644 airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/organization_fields.json create mode 100644 airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/person_fields.json diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 0904622573af..a021f9fce2f3 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -591,7 +591,7 @@ - name: Pipedrive sourceDefinitionId: d8286229-c680-4063-8c59-23b9b391c700 dockerRepository: airbyte/source-pipedrive - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/pipedrive icon: pipedrive.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 0db0866bfa11..d69af5d6f599 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -6266,7 +6266,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-pipedrive:0.1.9" +- dockerImage: "airbyte/source-pipedrive:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/pipedrive" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-pipedrive/Dockerfile b/airbyte-integrations/connectors/source-pipedrive/Dockerfile index 16d99ef08ea5..7c846e53ef28 100644 --- a/airbyte-integrations/connectors/source-pipedrive/Dockerfile +++ b/airbyte-integrations/connectors/source-pipedrive/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-pipedrive diff --git a/airbyte-integrations/connectors/source-pipedrive/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-pipedrive/integration_tests/configured_catalog.json index a4bce64cbe7d..a827c144ec2d 100644 --- a/airbyte-integrations/connectors/source-pipedrive/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-pipedrive/integration_tests/configured_catalog.json @@ -101,6 +101,42 @@ "sync_mode": "incremental", "cursor_field": ["modified"], "destination_sync_mode": "append" + }, + { + "stream": { + "name": "deal_fields", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["update_time"] + }, + "sync_mode": "incremental", + "cursor_field": ["update_time"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "organization_fields", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["update_time"] + }, + "sync_mode": "incremental", + "cursor_field": ["update_time"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "person_fields", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["update_time"] + }, + "sync_mode": "incremental", + "cursor_field": ["update_time"], + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/deal_fields.json b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/deal_fields.json new file mode 100644 index 000000000000..0f855dd4a329 --- /dev/null +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/deal_fields.json @@ -0,0 +1,79 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "order_nr": { + "type": ["null", "integer"] + }, + "field_type": { + "type": ["null", "string"] + }, + "add_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "update_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "last_updated_by_user_id": { + "type": ["null", "integer"] + }, + "active_flag": { + "type": ["null", "boolean"] + }, + "edit_flag": { + "type": ["null", "boolean"] + }, + "index_visible_flag": { + "type": ["null", "boolean"] + }, + "details_visible_flag": { + "type": ["null", "boolean"] + }, + "add_visible_flag": { + "type": ["null", "boolean"] + }, + "important_flag": { + "type": ["null", "boolean"] + }, + "bulk_edit_allowed": { + "type": ["null", "boolean"] + }, + "searchable_flag": { + "type": ["null", "boolean"] + }, + "filtering_allowed": { + "type": ["null", "boolean"] + }, + "sortable_flag": { + "type": ["null", "boolean"] + }, + "options": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string", "integer"] + }, + "label": { + "type": ["null", "string"] + } + } + } + }, + "mandatory_flag": { + "type": ["null", "string", "boolean", "object"] + } + } +} diff --git 
a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/organization_fields.json b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/organization_fields.json new file mode 100644 index 000000000000..10406b6f63ba --- /dev/null +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/organization_fields.json @@ -0,0 +1,79 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "order_nr": { + "type": ["null", "integer"] + }, + "field_type": { + "type": ["null", "string"] + }, + "add_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "update_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "last_updated_by_user_id": { + "type": ["null", "integer"] + }, + "active_flag": { + "type": ["null", "boolean"] + }, + "edit_flag": { + "type": ["null", "boolean"] + }, + "index_visible_flag": { + "type": ["null", "boolean"] + }, + "details_visible_flag": { + "type": ["null", "boolean"] + }, + "add_visible_flag": { + "type": ["null", "boolean"] + }, + "important_flag": { + "type": ["null", "boolean"] + }, + "bulk_edit_allowed": { + "type": ["null", "boolean"] + }, + "searchable_flag": { + "type": ["null", "boolean"] + }, + "filtering_allowed": { + "type": ["null", "boolean"] + }, + "sortable_flag": { + "type": ["null", "boolean"] + }, + "options": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer", "string", "boolean"] + }, + "label": { + "type": ["null", "string"] + } + } + } + }, + "mandatory_flag": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/person_fields.json b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/person_fields.json new file mode 100644 index 000000000000..10406b6f63ba --- /dev/null +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/schemas/person_fields.json @@ -0,0 +1,79 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "order_nr": { + "type": ["null", "integer"] + }, + "field_type": { + "type": ["null", "string"] + }, + "add_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "update_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "last_updated_by_user_id": { + "type": ["null", "integer"] + }, + "active_flag": { + "type": ["null", "boolean"] + }, + "edit_flag": { + "type": ["null", "boolean"] + }, + "index_visible_flag": { + "type": ["null", "boolean"] + }, + "details_visible_flag": { + "type": ["null", "boolean"] + }, + "add_visible_flag": { + "type": ["null", "boolean"] + }, + "important_flag": { + "type": ["null", "boolean"] + }, + "bulk_edit_allowed": { + "type": ["null", "boolean"] + }, + "searchable_flag": { + "type": ["null", "boolean"] + }, + "filtering_allowed": { + "type": ["null", "boolean"] + }, + "sortable_flag": { + "type": ["null", "boolean"] + }, + "options": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer", "string", "boolean"] + }, + "label": { + "type": ["null", "string"] + 
} + } + } + }, + "mandatory_flag": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py index 6e0e9500a112..74a11582bf74 100644 --- a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/source.py @@ -11,7 +11,20 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.streams import Stream from airbyte_cdk.sources.streams.http.auth import Oauth2Authenticator -from source_pipedrive.streams import Activities, ActivityFields, Deals, Leads, Organizations, Persons, Pipelines, Stages, Users +from source_pipedrive.streams import ( + Activities, + ActivityFields, + DealFields, + Deals, + Leads, + OrganizationFields, + Organizations, + PersonFields, + Persons, + Pipelines, + Stages, + Users, +) class SourcePipedrive(AbstractSource): @@ -35,9 +48,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Activities(**incremental_kwargs), ActivityFields(**stream_kwargs), Deals(**incremental_kwargs), + DealFields(**stream_kwargs), Leads(**stream_kwargs), Organizations(**incremental_kwargs), + OrganizationFields(**stream_kwargs), Persons(**incremental_kwargs), + PersonFields(**stream_kwargs), Pipelines(**incremental_kwargs), Stages(**incremental_kwargs), Users(**incremental_kwargs), diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/spec.json b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/spec.json index 86f9e47a0ca1..d03f5c6e99e7 100644 --- a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/spec.json +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/spec.json @@ -25,8 +25,6 @@ "auth_type": { "type": "string", "const": "Client", - "enum": ["Client"], - "default": "Client", "order": 0 }, "client_id": { @@ -57,8 +55,6 @@ "auth_type": { "type": "string", "const": "Token", - "enum": ["Token"], - "default": "Token", "order": 0 }, "api_token": { diff --git a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/streams.py b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/streams.py index ce4cb83587ad..e6935241c5b7 100755 --- a/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/streams.py +++ b/airbyte-integrations/connectors/source-pipedrive/source_pipedrive/streams.py @@ -110,6 +110,10 @@ class Deals(PipedriveStream): """ +class DealFields(PipedriveStream): + """https://developers.pipedrive.com/docs/api/v1/DealFields#getDealFields""" + + class Leads(PipedriveStream): """https://developers.pipedrive.com/docs/api/v1/Leads#getLeads""" @@ -126,8 +130,6 @@ class Activities(PipedriveStream): class ActivityFields(PipedriveStream): """https://developers.pipedrive.com/docs/api/v1/ActivityFields#getActivityFields""" - primary_key = None - class Organizations(PipedriveStream): """ @@ -136,6 +138,10 @@ class Organizations(PipedriveStream): """ +class OrganizationFields(PipedriveStream): + """https://developers.pipedrive.com/docs/api/v1/OrganizationFields#getOrganizationFields""" + + class Persons(PipedriveStream): """ API docs: https://developers.pipedrive.com/docs/api/v1/Persons#getPersons, @@ -143,6 +149,10 @@ class Persons(PipedriveStream): """ +class PersonFields(PipedriveStream): + """https://developers.pipedrive.com/docs/api/v1/PersonFields#getPersonFields""" + + class Pipelines(PipedriveStream): """ API 
docs: https://developers.pipedrive.com/docs/api/v1/Pipelines#getPipelines, diff --git a/docs/integrations/sources/pipedrive.md b/docs/integrations/sources/pipedrive.md index b9da3adea186..82ec698a45e6 100644 --- a/docs/integrations/sources/pipedrive.md +++ b/docs/integrations/sources/pipedrive.md @@ -87,6 +87,7 @@ See [How to find the API token](https://pipedrive.readme.io/docs/how-to-find-the | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| 0.1.10 | 2022-04-26 | [11870](https://github.com/airbytehq/airbyte/pull/11870) | Add 3 streams: DealFields, OrganizationFields and PersonFields | | 0.1.9 | 2021-12-07 | [8582](https://github.com/airbytehq/airbyte/pull/8582) | Update connector fields title/description | | 0.1.8 | 2021-11-16 | [7875](https://github.com/airbytehq/airbyte/pull/7875) | Extend schema for "persons" stream | | 0.1.7 | 2021-11-15 | [7968](https://github.com/airbytehq/airbyte/pull/7968) | Update oAuth flow config | From c856d7953db82e60b919f8fb771c0acd50e20685 Mon Sep 17 00:00:00 2001 From: terencecho Date: Tue, 26 Apr 2022 17:35:10 -0700 Subject: [PATCH 18/39] Adjust auto-disable connection logic for spam (#12288) * Adjust auto-disable connection logic for spam * refactor for readability --- .../AutoDisableConnectionActivityImpl.java | 46 +++++++++---------- .../AutoDisableConnectionActivityTest.java | 27 +++++++++-- 2 files changed, 44 insertions(+), 29 deletions(-) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java index 5502fc1a7a82..80e387c1b0fa 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityImpl.java @@ -56,6 +56,8 @@ public AutoDisableConnectionOutput autoDisableFailingConnection(final AutoDisabl final long currTimestampInSeconds = input.getCurrTimestamp().getEpochSecond(); final Job lastJob = jobPersistence.getLastReplicationJob(input.getConnectionId()) .orElseThrow(() -> new Exception("Auto-Disable Connection should not have been attempted if can't get latest replication job.")); + final Job firstJob = jobPersistence.getFirstReplicationJob(input.getConnectionId()) + .orElseThrow(() -> new Exception("Auto-Disable Connection should not have been attempted if no replication job has been run.")); final List jobs = jobPersistence.listJobStatusAndTimestampWithConnection(input.getConnectionId(), REPLICATION_TYPES, input.getCurrTimestamp().minus(maxDaysOfOnlyFailedJobs, DAYS)); @@ -73,13 +75,16 @@ public AutoDisableConnectionOutput autoDisableFailingConnection(final AutoDisabl } } + final boolean warningPreviouslySentForMaxDays = + warningPreviouslySentForMaxDays(numFailures, successTimestamp, maxDaysOfOnlyFailedJobsBeforeWarning, firstJob, jobs); + if (numFailures == 0) { return new AutoDisableConnectionOutput(false); } else if (numFailures >= configs.getMaxFailedJobsInARowBeforeConnectionDisable()) { // disable connection if max consecutive failed jobs limit has been hit disableConnection(input.getConnectionId(), lastJob); return new AutoDisableConnectionOutput(true); - } else if (numFailures == maxFailedJobsInARowBeforeConnectionDisableWarning) { + } else if (numFailures == maxFailedJobsInARowBeforeConnectionDisableWarning && 
          !warningPreviouslySentForMaxDays) {
       // warn if number of consecutive failures hits 50% of MaxFailedJobsInARow
       jobNotifier.autoDisableConnectionWarning(lastJob);
       // explicitly send to email if customer.io api key is set, since email notification cannot be set by
@@ -91,8 +96,6 @@ public AutoDisableConnectionOutput autoDisableFailingConnection(final AutoDisabl
       // calculate the number of days this connection first tried a replication job, used to ensure not to
       // disable or warn for `maxDaysOfOnlyFailedJobs` if the first job is younger than
       // `maxDaysOfOnlyFailedJobs` days, This avoids cases such as "the very first job run was a failure".
-      final Job firstJob = jobPersistence.getFirstReplicationJob(input.getConnectionId())
-          .orElseThrow(() -> new Exception("Auto-Disable Connection should not have been attempted if no replication job has been run."));
       final int numDaysSinceFirstReplicationJob = getDaysSinceTimestamp(currTimestampInSeconds, firstJob.getCreatedAtInSecond());
       final boolean firstReplicationOlderThanMaxDisableDays = numDaysSinceFirstReplicationJob >= maxDaysOfOnlyFailedJobs;
       final boolean noPreviousSuccess = successTimestamp.isEmpty();
@@ -103,6 +106,11 @@
         return new AutoDisableConnectionOutput(true);
       }
 
+      // skip warning if previously sent
+      if (warningPreviouslySentForMaxDays || numFailures > maxFailedJobsInARowBeforeConnectionDisableWarning) {
+        return new AutoDisableConnectionOutput(false);
+      }
+
       final boolean firstReplicationOlderThanMaxDisableWarningDays = numDaysSinceFirstReplicationJob >= maxDaysOfOnlyFailedJobsBeforeWarning;
       final boolean successOlderThanPrevFailureByMaxWarningDays = // set to true if no previous success is found
           noPreviousSuccess || getDaysSinceTimestamp(currTimestampInSeconds, successTimestamp.get()) >= maxDaysOfOnlyFailedJobsBeforeWarning;
@@ -110,7 +118,10 @@
       // send warning if there are only failed jobs in the past maxDaysOfOnlyFailedJobsBeforeWarning days
       // _unless_ a warning should have already been sent in the previous failure
       if (firstReplicationOlderThanMaxDisableWarningDays && successOlderThanPrevFailureByMaxWarningDays) {
-        sendWarningIfNotPreviouslySent(successTimestamp, maxDaysOfOnlyFailedJobsBeforeWarning, firstJob, lastJob, jobs, numFailures);
+        jobNotifier.autoDisableConnectionWarning(lastJob);
+        // explicitly send to email if customer.io api key is set, since email notification cannot be set by
+        // configs through UI yet
+        jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, lastJob);
       }
 
     } catch (final Exception e) {
@@ -120,21 +131,6 @@
     return new AutoDisableConnectionOutput(false);
   }
 
-  private void sendWarningIfNotPreviouslySent(final Optional<Long> successTimestamp,
-                                              final int maxDaysOfOnlyFailedJobsBeforeWarning,
-                                              final Job firstJob,
-                                              final Job lastJob,
-                                              final List<JobWithStatusAndTimestamp> jobs,
-                                              final int numFailures) {
-    if (numFailures > 1 && checkIfWarningPreviouslySent(successTimestamp, maxDaysOfOnlyFailedJobsBeforeWarning, firstJob, jobs)) {
-      return;
-    }
-    jobNotifier.autoDisableConnectionWarning(lastJob);
-    // explicitly send to email if customer.io api key is set, since email notification cannot be set by
-    // configs through UI yet
-    jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, lastJob);
-  }
-
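   // Net effect of the rework, as read from the hunks above: the send-then-check helper is removed,
   // a single warningPreviouslySentForMaxDays flag is computed once up front, and both warning paths
   // consult it before notifying, so at most one warning should go out per failure window.
   // Checks to see if warning should have been sent in the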
previous failure, if so skip sending of // warning to avoid spam // Assume warning has been sent if either of the following is true: @@ -142,11 +138,13 @@ private void sendWarningIfNotPreviouslySent(final Optional successTimestam // maxDaysOfOnlyFailedJobsBeforeWarning days after the first job // 2. success found and the previous failure occurred maxDaysOfOnlyFailedJobsBeforeWarning days // after that success - private boolean checkIfWarningPreviouslySent(final Optional successTimestamp, - final int maxDaysOfOnlyFailedJobsBeforeWarning, - final Job firstJob, - final List jobs) { - if (jobs.size() <= 1) + private boolean warningPreviouslySentForMaxDays(final int numFailures, + final Optional successTimestamp, + final int maxDaysOfOnlyFailedJobsBeforeWarning, + final Job firstJob, + final List jobs) { + // no previous warning sent if there was no previous failure + if (numFailures <= 1 || jobs.size() <= 1) return false; // get previous failed job (skipping first job since that's considered "current" job) diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java index 6e697f2c72e0..36bc505899ae 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivityTest.java @@ -87,6 +87,7 @@ void setUp() throws IOException { Mockito.when(mFeatureFlags.autoDisablesFailingConnections()).thenReturn(true); Mockito.when(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable()).thenReturn(MAX_DAYS_OF_ONLY_FAILED_JOBS); Mockito.when(mJobPersistence.getLastReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); + Mockito.when(mJobPersistence.getFirstReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); } // test warnings @@ -118,7 +119,6 @@ public void testWarningNotificationsForAutoDisablingMaxDaysOfFailure() throws IO .thenReturn(Collections.singletonList(FAILED_JOB)); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - Mockito.when(mJobPersistence.getFirstReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); Mockito.when(mJob.getCreatedAtInSecond()).thenReturn( CURR_INSTANT.getEpochSecond() - TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_WARNING)); @@ -140,7 +140,27 @@ public void testWarningNotificationsDoesNotSpam() throws IOException { CURR_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - Mockito.when(mJobPersistence.getFirstReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); + Mockito.when(mJob.getCreatedAtInSecond()).thenReturn(mJobCreateOrUpdatedInSeconds); + Mockito.when(mJob.getUpdatedAtInSecond()).thenReturn(mJobCreateOrUpdatedInSeconds); + + final AutoDisableConnectionOutput output = autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); + assertThat(output.isDisabled()).isFalse(); + assertThat(standardSync.getStatus()).isEqualTo(Status.ACTIVE); + Mockito.verify(mJobNotifier, Mockito.never()).autoDisableConnection(Mockito.any()); + Mockito.verify(mJobNotifier, Mockito.never()).autoDisableConnectionWarning(Mockito.any()); + } + + @Test + @DisplayName("Test that a 
notification warning is not sent after one was just sent for consecutive failures") + public void testWarningNotificationsDoesNotSpamAfterConsecutiveFailures() throws IOException { + final List jobs = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, FAILED_JOB)); + final long mJobCreateOrUpdatedInSeconds = CURR_INSTANT.getEpochSecond() - TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_WARNING); + + Mockito.when(mConfigs.getMaxDaysOfOnlyFailedJobsBeforeConnectionDisable()).thenReturn(MAX_DAYS_OF_ONLY_FAILED_JOBS); + Mockito.when(mJobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, REPLICATION_TYPES, + CURR_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); + + Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); Mockito.when(mJob.getCreatedAtInSecond()).thenReturn(mJobCreateOrUpdatedInSeconds); Mockito.when(mJob.getUpdatedAtInSecond()).thenReturn(mJobCreateOrUpdatedInSeconds); @@ -160,7 +180,6 @@ public void testOnlyFailuresButFirstJobYoungerThanMaxDaysWarning() throws IOExce .thenReturn(Collections.singletonList(FAILED_JOB)); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - Mockito.when(mJobPersistence.getFirstReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); Mockito.when(mJob.getCreatedAtInSecond()).thenReturn(CURR_INSTANT.getEpochSecond()); final AutoDisableConnectionOutput output = autoDisableActivity.autoDisableFailingConnection(ACTIVITY_INPUT); @@ -199,7 +218,6 @@ public void testLessThanMaxFailuresInARow() throws IOException { jobs.add(SUCCEEDED_JOB); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - Mockito.when(mJobPersistence.getFirstReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); Mockito.when(mJobPersistence.listJobStatusAndTimestampWithConnection(CONNECTION_ID, REPLICATION_TYPES, CURR_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); Mockito.when(mJob.getCreatedAtInSecond()).thenReturn( @@ -237,7 +255,6 @@ public void testOnlyFailuresInMaxDays() throws IOException, JsonValidationExcept .thenReturn(Collections.singletonList(FAILED_JOB)); Mockito.when(mConfigs.getMaxFailedJobsInARowBeforeConnectionDisable()).thenReturn(MAX_FAILURE_JOBS_IN_A_ROW); - Mockito.when(mJobPersistence.getFirstReplicationJob(CONNECTION_ID)).thenReturn(Optional.of(mJob)); Mockito.when(mJob.getCreatedAtInSecond()).thenReturn( CURR_INSTANT.getEpochSecond() - TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS)); Mockito.when(mConfigRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); From b16e13e9cf57ec2fc5ed48eab5595e603c9614bf Mon Sep 17 00:00:00 2001 From: VitaliiMaltsev <39538064+VitaliiMaltsev@users.noreply.github.com> Date: Wed, 27 Apr 2022 14:01:26 +0300 Subject: [PATCH 19/39] Redshift Destination: update spec (#12100) * Redshift Destination: update spec * update spec.json * update links in spec.json * added more links to spec.json | refactoring * updated docs with stadard connector template * added hyperlink to documentation for part_size field --- .../src/main/resources/spec.json | 28 ++-- docs/integrations/destinations/redshift.md | 158 ++++++++++-------- 2 files changed, 101 insertions(+), 85 deletions(-) diff --git a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json 
b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json index 360372f2ca89..243259955ddf 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redshift/src/main/resources/spec.json @@ -49,22 +49,22 @@ "title": "Default Schema" }, "s3_bucket_name": { - "title": "S3 Bucket Name", + "title": "S3 Bucket Name (Optional)", "type": "string", - "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", + "description": "The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.", "examples": ["airbyte.staging"] }, "s3_bucket_path": { - "title": "S3 Bucket Path", + "title": "S3 Bucket Path (Optional)", "type": "string", - "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory.", + "description": "The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.", "examples": ["data_sync/test"] }, "s3_bucket_region": { - "title": "S3 Bucket Region", + "title": "S3 Bucket Region (Optional)", "type": "string", "default": "", - "description": "The region of the S3 staging bucket to use if utilising a copy strategy.", + "description": "The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.", "enum": [ "", "us-east-1", @@ -94,14 +94,14 @@ }, "access_key_id": { "type": "string", - "description": "The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.", - "title": "S3 Key Id", + "description": "This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Key Id (Optional)", "airbyte_secret": true }, "secret_access_key": { "type": "string", - "description": "The corresponding secret to the above access key id.", - "title": "S3 Access Key", + "description": "The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.", + "title": "S3 Access Key (Optional)", "airbyte_secret": true }, "part_size": { @@ -109,13 +109,13 @@ "minimum": 10, "maximum": 100, "examples": ["10"], - "description": "Optional. Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care.", - "title": "Stream Part Size" + "description": "Increase this if syncing tables larger than 100GB. Only relevant for COPY. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default limit of 100GB tables. 
Note: a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. See docs for details.", + "title": "Stream Part Size (Optional)" }, "purge_staging_data": { - "title": "Purge Staging Files and Tables", + "title": "Purge Staging Files and Tables (Optional)", "type": "boolean", - "description": "Whether to delete the staging files from S3 after completing the sync. See the docs for details. Only relevant for COPY. Defaults to true.", + "description": "Whether to delete the staging files from S3 after completing the sync. See docs for details.", "default": true } } diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 8339f8e7c8ec..6dd424fdaa3d 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -1,124 +1,139 @@ # Redshift -## Overview +This page guides you through the process of setting up the Redshift destination connector. + +## Prerequisites The Airbyte Redshift destination allows you to sync data to Redshift. This Redshift destination connector has two replication strategies: 1. INSERT: Replicates data via SQL INSERT queries. This is built on top of the destination-jdbc code base and is configured to rely on JDBC 4.2 standard drivers provided by Amazon via Mulesoft [here](https://mvnrepository.com/artifact/com.amazon.redshift/redshift-jdbc42) as described in Redshift documentation [here](https://docs.aws.amazon.com/redshift/latest/mgmt/jdbc20-install.html). **Not recommended for production workloads as this does not scale well**. -2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). Requires an S3 bucket and credentials. - -Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. - -We recommend users use INSERT for testing, to avoid any additional setup, and switch to COPY for production workloads. - -### Sync overview - -#### Output schema - -Each stream will be output into its own raw table in Redshift. Each table will contain 3 columns: - -* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. The column type in Redshift is `VARCHAR`. -* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. -* `_airbyte_data`: a json blob representing with the event data. The column type in Redshift is `VARCHAR` but can be be parsed with JSON functions. - -#### Features -| Feature | Supported?\(Yes/No\) | Notes | -| :--- | :--- | :--- | -| Full Refresh Sync | Yes | | -| Incremental - Append Sync | Yes | | -| Incremental - Deduped History | Yes | | -| Namespaces | Yes | | -| SSL Support | Yes | | +For INSERT strategy: +* **Host** +* **Port** +* **Username** +* **Password** +* **Schema** +* **Database** + * This database needs to exist within the cluster provided. -#### Target Database +2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). 
Requires an S3 bucket and credentials. -You will need to choose an existing database or create a new database that will be used to store synced data from Airbyte. +Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. -## Getting started +For COPY strategy: -### Requirements +* **S3 Bucket Name** + * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. +* **S3 Bucket Region** + * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. +* **Access Key Id** + * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. + * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. +* **Secret Access Key** + * Corresponding key to the above key id. +* **Part Size** + * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. -1. Active Redshift cluster -2. Allow connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\) -3. A staging S3 bucket with credentials \(for the COPY strategy\). +Optional parameters: +* **Bucket Path** + * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. +* **Purge Staging Data** + * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. -:::info -Even if your Airbyte instance is running on a server in the same VPC as your Redshift cluster, you may need to place them in the **same security group** to allow connections between the two. +## Step 1: Set up Redshift -::: +1. [Log in](https://aws.amazon.com/console/) to AWS Management console. + If you don't have a AWS account already, you’ll need to [create](https://aws.amazon.com/premiumsupport/knowledge-center/create-and-activate-aws-account/) one in order to use the API. +2. Go to the AWS Redshift service +3. [Create](https://docs.aws.amazon.com/ses/latest/dg/event-publishing-redshift-cluster.html) and activate AWS Redshift cluster if you don't have one ready +4. (Optional) [Allow](https://aws.amazon.com/premiumsupport/knowledge-center/cannot-connect-redshift-cluster/) connections from Airbyte to your Redshift cluster \(if they exist in separate VPCs\) +5. 
(Optional) [Create](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) a staging S3 bucket \(for the COPY strategy\). -### Setup guide +## Step 2: Set up the destination connector in Airbyte -#### 1. Make sure your cluster is active and accessible from the machine running Airbyte +**For Airbyte Cloud:** -This is dependent on your networking setup. The easiest way to verify if Airbyte is able to connect to your Redshift cluster is via the check connection tool in the UI. You can check AWS Redshift documentation with a tutorial on how to properly configure your cluster's access [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-authorize-cluster-access.html) +1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account. +2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**. +3. On the destination setup page, select **Redshift** from the Destination type dropdown and enter a name for this connector. +4. Fill in all the required fields to use the INSERT or COPY strategy +5. Click `Set up destination`. -#### 2. Fill up connection info +**For Airbyte OSS:** -Next is to provide the necessary information on how to connect to your cluster such as the `host` whcih is part of the connection string or Endpoint accessible [here](https://docs.aws.amazon.com/redshift/latest/gsg/rs-gsg-connect-to-cluster.html#rs-gsg-how-to-get-connection-string) without the `port` and `database` name \(it typically includes the cluster-id, region and end with `.redshift.amazonaws.com`\). +1. Go to local Airbyte page. +2. In the left navigation bar, click **Destinations**. In the top-right corner, click **+ new destination**. +3. On the destination setup page, select **Redshift** from the Destination type dropdown and enter a name for this connector. +4. Fill in all the required fields to use the INSERT or COPY strategy +5. Click `Set up destination`. -You should have all the requirements needed to configure Redshift as a destination in the UI. You'll need the following information to configure the destination: -* **Host** -* **Port** -* **Username** -* **Password** -* **Schema** -* **Database** - * This database needs to exist within the cluster provided. +## Supported sync modes -#### 2a. Fill up S3 info \(for COPY strategy\) +The Redshift destination connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts/#connection-sync-mode): +- Full Refresh +- Incremental - Append Sync +- Incremental - Deduped History -Provide the required S3 info. +## Performance considerations -* **S3 Bucket Name** - * See [this](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html) to create an S3 bucket. -* **S3 Bucket Region** - * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. -* **Access Key Id** - * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. - * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. -* **Secret Access Key** - * Corresponding key to the above key id. -* **Part Size** - * Affects the size limit of an individual Redshift table. Optional. Increase this if syncing tables larger than 100GB. Files are streamed to S3 in parts. 
This determines the size of each part, in MBs. As S3 has a limit of 10,000 parts per file, part size affects the table size. This is 10MB by default, resulting in a default table limit of 100GB. Note, a larger part size will result in larger memory requirements. A rule of thumb is to multiply the part size by 10 to get the memory requirement. Modify this with care. +Synchronization performance depends on the amount of data to be transferred. +Cluster scaling issues can be resolved directly using the cluster settings in the AWS Redshift console -Optional parameters: -* **Bucket Path** - * The directory within the S3 bucket to place the staging data. For example, if you set this to `yourFavoriteSubdirectory`, we will place the staging data inside `s3://yourBucket/yourFavoriteSubdirectory`. If not provided, defaults to the root directory. -* **Purge Staging Data** - * Whether to delete the staging files from S3 after completing the sync. Specifically, the connector will create CSV files named `bucketPath/namespace/streamName/syncDate_epochMillis_randomUuid.csv` containing three columns (`ab_id`, `data`, `emitted_at`). Normally these files are deleted after the `COPY` command completes; if you want to keep them for other purposes, set `purge_staging_data` to `false`. +## Connector-specific features & highlights -## Notes about Redshift Naming Conventions +### Notes about Redshift Naming Conventions From [Redshift Names & Identifiers](https://docs.aws.amazon.com/redshift/latest/dg/r_names.html): -### Standard Identifiers +#### Standard Identifiers * Begin with an ASCII single-byte alphabetic character or underscore character, or a UTF-8 multibyte character two to four bytes long. * Subsequent characters can be ASCII single-byte alphanumeric characters, underscores, or dollar signs, or UTF-8 multibyte characters two to four bytes long. * Be between 1 and 127 bytes in length, not including quotation marks for delimited identifiers. * Contain no quotation marks and no spaces. -### Delimited Identifiers +#### Delimited Identifiers Delimited identifiers \(also known as quoted identifiers\) begin and end with double quotation marks \("\). If you use a delimited identifier, you must use the double quotation marks for every reference to that object. The identifier can contain any standard UTF-8 printable characters other than the double quotation mark itself. Therefore, you can create column or table names that include otherwise illegal characters, such as spaces or the percent symbol. ASCII letters in delimited identifiers are case-insensitive and are folded to lowercase. To use a double quotation mark in a string, you must precede it with another double quotation mark character. Therefore, Airbyte Redshift destination will create tables and schemas using the Unquoted identifiers when possible or fallback to Quoted Identifiers if the names are containing special characters. -## Data Size Limitations +### Data Size Limitations Redshift specifies a maximum limit of 65535 bytes to store the raw JSON record data. Thus, when a row is too big to fit, the Redshift destination fails to load such data and currently ignores that record. See [docs](https://docs.aws.amazon.com/redshift/latest/dg/r_Character_types.html) -## Encryption +### Encryption All Redshift connections are encrypted using SSL +### Output schema + +Each stream will be output into its own raw table in Redshift. Each table will contain 3 columns: + +* `_airbyte_ab_id`: a uuid assigned by Airbyte to each event that is processed. 
The column type in Redshift is `VARCHAR`. +* `_airbyte_emitted_at`: a timestamp representing when the event was pulled from the data source. The column type in Redshift is `TIMESTAMP WITH TIME ZONE`. +* `_airbyte_data`: a json blob representing with the event data. The column type in Redshift is `VARCHAR` but can be be parsed with JSON functions. + +## Data type mapping + +| Redshift Type | Airbyte Type | Notes | +| :--- | :--- | :--- | +| `boolean` | `boolean` | | +| `int` | `integer` | | +| `float` | `number` | | +| `varchar` | `string` | | +| `date/varchar` | `date` | | +| `time/varchar` | `time` | | +| `timestamptz/varchar` | `timestamp_with_timezone` | | +| `varchar` | `array` | | +| `varchar` | `object` | | + ## Changelog | Version | Date | Pull Request | Subject | @@ -142,3 +157,4 @@ All Redshift connections are encrypted using SSL | 0.3.12 | 2021-07-21 | [3555](https://github.com/airbytehq/airbyte/pull/3555) | Enable partial checkpointing for halfway syncs | | 0.3.11 | 2021-07-20 | [4874](https://github.com/airbytehq/airbyte/pull/4874) | allow `additionalProperties` in connector spec | + From 9f577bb027199320066d7f69da0845f873ea974c Mon Sep 17 00:00:00 2001 From: noahkawasaki-airbyte <103465980+noahkawasaki-airbyte@users.noreply.github.com> Date: Wed, 27 Apr 2022 07:07:54 -0700 Subject: [PATCH 20/39] Update specs and definitions files for destination-postgres 0.3.19 (#12317) * Generate specs and definitions files after destination-postgres 0.3.19 * Bump destination-postgres-strict-encrypt to 0.1.5 --- .../src/main/resources/seed/destination_definitions.yaml | 2 +- .../init/src/main/resources/seed/destination_specs.yaml | 2 +- .../destination-postgres-strict-encrypt/Dockerfile | 2 +- .../src/test/resources/expected_spec.json | 6 ++++++ 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index bcf75228f113..e9f195a59822 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -167,7 +167,7 @@ - name: Postgres destinationDefinitionId: 25c5221d-dce2-4163-ade9-739ef790f503 dockerRepository: airbyte/destination-postgres - dockerImageTag: 0.3.18 + dockerImageTag: 0.3.19 documentationUrl: https://docs.airbyte.io/integrations/destinations/postgres icon: postgresql.svg - name: Pulsar diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index af9b9c4fb3d5..dfa8a9384627 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -3047,7 +3047,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-postgres:0.3.18" +- dockerImage: "airbyte/destination-postgres:0.3.19" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" connectionSpecification: diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile index 1c32dea0e209..0c472d5343f8 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION 
destination-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/destination-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json index 10e1c1251a44..8ba1678fcb55 100644 --- a/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-postgres-strict-encrypt/src/test/resources/expected_spec.json @@ -54,6 +54,12 @@ "airbyte_secret": true, "order": 5 }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 7 + }, "tunnel_method": { "type": "object", "title": "SSH Tunnel Method", From b70a6fbd87aff1c960e35f25109942ca05daa4bd Mon Sep 17 00:00:00 2001 From: LiRen Tu Date: Wed, 27 Apr 2022 07:28:58 -0700 Subject: [PATCH 21/39] Format java code (#12401) --- .../databricks/DatabricksConstants.java | 3 +- .../destination/jdbc/SqlOperations.java | 47 ++++++++++--------- .../jdbc/copy/CopyConsumerFactory.java | 42 ++++++++--------- .../destination/jdbc/copy/StreamCopier.java | 1 + .../staging/StagingOperations.java | 2 +- .../LocalJsonDestinationAcceptanceTest.java | 1 - .../MariaDbTestDataComparator.java | 24 ++++++---- ...bColumnstoreDestinationAcceptanceTest.java | 5 +- .../MeiliSearchDestinationAcceptanceTest.java | 5 +- .../MongodbDestinationAcceptanceTest.java | 5 +- .../mqtt/MqttDestinationAcceptanceTest.java | 5 +- .../destination/oracle/OracleOperations.java | 22 ++++----- .../oracle/OracleTestDataComparator.java | 30 +++++++----- .../SshOracleDestinationAcceptanceTest.java | 4 +- ...ryptedOracleDestinationAcceptanceTest.java | 5 +- 15 files changed, 100 insertions(+), 101 deletions(-) diff --git a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java index f3d014d63726..4a5c1a4b146a 100644 --- a/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java +++ b/airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksConstants.java @@ -15,7 +15,6 @@ public class DatabricksConstants { "delta.autoOptimize.optimizeWrite = true", "delta.autoOptimize.autoCompact = true"); - private DatabricksConstants() { - } + private DatabricksConstants() {} } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java index 3d4eea93012b..37212fcff9e1 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/SqlOperations.java @@ 
-21,7 +21,7 @@ public interface SqlOperations { /** * Create a schema with provided name if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema. * @throws Exception exception */ @@ -30,7 +30,7 @@ public interface SqlOperations { /** * Denotes whether the schema exists in destination database * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema. * @return true if the schema exists in destination database, false if it doesn't */ @@ -41,9 +41,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Create a table with provided name in provided schema if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void createTableIfNotExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; @@ -51,9 +51,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Query to create a table with provided name in provided schema if it does not already exist. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @return query */ String createTableQuery(JdbcDatabase database, String schemaName, String tableName); @@ -62,7 +62,7 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Drop the table if it exists. * * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void dropTableIfExists(JdbcDatabase database, String schemaName, String tableName) throws Exception; @@ -70,9 +70,9 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Query to remove all records from a table. Assumes the table exists. * - * @param database Database that the connector is syncing + * @param database Database that the connector is syncing * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @return Query */ String truncateTableQuery(JdbcDatabase database, String schemaName, String tableName); @@ -80,20 +80,21 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN /** * Insert records into table. Assumes the table exists. * - * @param database Database that the connector is syncing - * @param records Records to insert. + * @param database Database that the connector is syncing + * @param records Records to insert. * @param schemaName Name of schema - * @param tableName Name of table + * @param tableName Name of table * @throws Exception exception */ void insertRecords(JdbcDatabase database, List records, String schemaName, String tableName) throws Exception; /** - * Query to copy all records from source table to destination table. Both tables must be in the specified schema. Assumes both table exist. + * Query to copy all records from source table to destination table. Both tables must be in the + * specified schema. Assumes both table exist. 
* - * @param database Database that the connector is syncing - * @param schemaName Name of schema - * @param sourceTableName Name of source table + * @param database Database that the connector is syncing + * @param schemaName Name of schema + * @param sourceTableName Name of source table * @param destinationTableName Name of destination table * @return Query */ @@ -103,7 +104,7 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN * Given an arbitrary number of queries, execute a transaction. * * @param database Database that the connector is syncing - * @param queries Queries to execute + * @param queries Queries to execute * @throws Exception exception */ void executeTransaction(JdbcDatabase database, List queries) throws Exception; @@ -120,19 +121,21 @@ default boolean isSchemaExists(final JdbcDatabase database, final String schemaN */ boolean isSchemaRequired(); - /** - * The method is responsible for executing some specific DB Engine logic in onClose method. We can override this method to execute specific logic - * e.g. to handle any necessary migrations in the destination, etc. + * The method is responsible for executing some specific DB Engine logic in onClose method. We can + * override this method to execute specific logic e.g. to handle any necessary migrations in the + * destination, etc. *

- * In next example you can see how migration from VARCHAR to SUPER column is handled for the Redshift destination: + * In next example you can see how migration from VARCHAR to SUPER column is handled for the + * Redshift destination: * * @param database - Database that the connector is interacting with - * @param schemaNames - schemas will be discovered + * @param schemaNames - schemas will be discovered * @see io.airbyte.integrations.destination.redshift.RedshiftSqlOperations#onDestinationCloseOperations */ default void onDestinationCloseOperations(JdbcDatabase database, Set schemaNames) { // do nothing LOGGER.info("No onDestinationCloseOperations required for this destination."); } + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 9970402d6787..2fb4d0b3bf3d 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -35,13 +35,13 @@ public class CopyConsumerFactory { private static final Logger LOGGER = LoggerFactory.getLogger(CopyConsumerFactory.class); public static AirbyteMessageConsumer create(final Consumer outputRecordCollector, - final JdbcDatabase database, - final SqlOperations sqlOperations, - final ExtendedNameTransformer namingResolver, - final T config, - final ConfiguredAirbyteCatalog catalog, - final StreamCopierFactory streamCopierFactory, - final String defaultSchema) { + final JdbcDatabase database, + final SqlOperations sqlOperations, + final ExtendedNameTransformer namingResolver, + final T config, + final ConfiguredAirbyteCatalog catalog, + final StreamCopierFactory streamCopierFactory, + final String defaultSchema) { final Map pairToCopier = createWriteConfigs( namingResolver, config, @@ -65,12 +65,12 @@ public static AirbyteMessageConsumer create(final Consumer o } private static Map createWriteConfigs(final ExtendedNameTransformer namingResolver, - final T config, - final ConfiguredAirbyteCatalog catalog, - final StreamCopierFactory streamCopierFactory, - final String defaultSchema, - final JdbcDatabase database, - final SqlOperations sqlOperations) { + final T config, + final ConfiguredAirbyteCatalog catalog, + final StreamCopierFactory streamCopierFactory, + final String defaultSchema, + final JdbcDatabase database, + final SqlOperations sqlOperations) { final Map pairToCopier = new HashMap<>(); final String stagingFolder = UUID.randomUUID().toString(); for (final var configuredStream : catalog.getStreams()) { @@ -89,8 +89,8 @@ private static OnStartFunction onStartFunction(final Map recordWriterFunction(final Map pairToCopier, - final SqlOperations sqlOperations, - final Map pairToIgnoredRecordCount) { + final SqlOperations sqlOperations, + final Map pairToIgnoredRecordCount) { return (AirbyteStreamNameNamespacePair pair, List records) -> { final var fileName = pairToCopier.get(pair).prepareStagingFile(); for (final AirbyteRecordMessage recordMessage : records) { @@ -117,9 +117,9 @@ private static CheckAndRemoveRecordWriter removeStagingFilePrinter(final Map pairToCopier, - final JdbcDatabase database, - final SqlOperations sqlOperations, - final Map pairToIgnoredRecordCount) { + final 
JdbcDatabase database, + final SqlOperations sqlOperations, + final Map pairToIgnoredRecordCount) { return (hasFailed) -> { pairToIgnoredRecordCount .forEach((pair, count) -> LOGGER.warn("A total of {} record(s) of data from stream {} were invalid and were ignored.", count, pair)); @@ -128,9 +128,9 @@ private static OnCloseFunction onCloseFunction(final Map pairToCopier, - boolean hasFailed, - final JdbcDatabase db, - final SqlOperations sqlOperations) + boolean hasFailed, + final JdbcDatabase db, + final SqlOperations sqlOperations) throws Exception { Exception firstException = null; List streamCopiers = new ArrayList<>(pairToCopier.values()); diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java index 93eb78cadafc..d655bea2f147 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/StreamCopier.java @@ -77,4 +77,5 @@ public interface StreamCopier { * @return current staging file name */ String getCurrentFile(); + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java index 5af382004d75..e2a1b799e48c 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingOperations.java @@ -11,7 +11,7 @@ import java.util.UUID; import org.joda.time.DateTime; -public interface StagingOperations extends SqlOperations { +public interface StagingOperations extends SqlOperations { String getStageName(String namespace, String streamName); diff --git a/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java index aa17f0a82513..63e7dd55d6c6 100644 --- a/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-local-json/src/test-integration/java/io/airbyte/integrations/destination/local_json/LocalJsonDestinationAcceptanceTest.java @@ -14,7 +14,6 @@ import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; - import java.nio.file.Files; import java.nio.file.Path; import java.util.List; diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java 
b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java index 45b6f093f5f9..d10d4349dc23 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariaDbTestDataComparator.java @@ -1,22 +1,26 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.mariadb_columnstore; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; - import java.util.ArrayList; import java.util.List; public class MariaDbTestDataComparator extends AdvancedTestDataComparator { - private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); + private final ExtendedNameTransformer namingResolver = new MariadbColumnstoreNameTransformer(); + + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); + return result; + } - return result; - } } diff --git a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java index 442e684de020..8098ab53ae45 100644 --- a/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mariadb-columnstore/src/test-integration/java/io/airbyte/integrations/destination/mariadb_columnstore/MariadbColumnstoreDestinationAcceptanceTest.java @@ -13,13 +13,10 @@ import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.MariaDBContainer; diff --git a/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java index 66cd9a83e29b..bd94430bebdb 100644 --- a/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-meilisearch/src/test-integration/java/io/airbyte/integrations/destination/meilisearch/MeiliSearchDestinationAcceptanceTest.java @@ -14,15 +14,14 @@ import io.airbyte.commons.stream.MoreStreams; import io.airbyte.commons.text.Names; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; diff --git a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java index ac93c4e54a5d..a04fdd273ec6 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationAcceptanceTest.java @@ -12,11 +12,10 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.db.mongodb.MongoDatabase; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; -import java.util.ArrayList; -import java.util.List; - import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.util.ArrayList; +import java.util.List; import org.bson.Document; import org.testcontainers.containers.MongoDBContainer; diff --git a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java index 8e216112d462..d4f9b381187c 100644 --- a/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mqtt/src/test-integration/java/io/airbyte/integrations/destination/mqtt/MqttDestinationAcceptanceTest.java @@ -13,6 +13,8 @@ import com.hivemq.testcontainer.junit5.HiveMQTestContainerExtension; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import 
io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; @@ -23,9 +25,6 @@ import java.util.List; import java.util.Map; import java.util.UUID; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.eclipse.paho.client.mqttv3.MqttClient; import org.eclipse.paho.client.mqttv3.MqttConnectOptions; import org.eclipse.paho.client.mqttv3.MqttException; diff --git a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java index b32cf07cbb45..a133dfb5285f 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java +++ b/airbyte-integrations/connectors/destination-oracle/src/main/java/io/airbyte/integrations/destination/oracle/OracleOperations.java @@ -94,9 +94,9 @@ public String truncateTableQuery(final JdbcDatabase database, final String schem @Override public void insertRecords(final JdbcDatabase database, - final List records, - final String schemaName, - final String tempTableName) + final List records, + final String schemaName, + final String tempTableName) throws Exception { final String tableName = String.format("%s.%s", schemaName, tempTableName); final String columns = String.format("(%s, %s, %s)", @@ -107,11 +107,11 @@ public void insertRecords(final JdbcDatabase database, // Adapted from SqlUtils.insertRawRecordsInSingleQuery to meet some needs specific to Oracle syntax private static void insertRawRecordsInSingleQuery(final String tableName, - final String columns, - final String recordQueryComponent, - final JdbcDatabase jdbcDatabase, - final List records, - final Supplier uuidSupplier) + final String columns, + final String recordQueryComponent, + final JdbcDatabase jdbcDatabase, + final List records, + final Supplier uuidSupplier) throws SQLException { if (records.isEmpty()) { return; @@ -152,9 +152,9 @@ private static void insertRawRecordsInSingleQuery(final String tableName, @Override public String copyTableQuery(final JdbcDatabase database, - final String schemaName, - final String sourceTableName, - final String destinationTableName) { + final String schemaName, + final String sourceTableName, + final String destinationTableName) { return String.format("INSERT INTO %s.%s SELECT * FROM %s.%s\n", schemaName, destinationTableName, schemaName, sourceTableName); } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java index 5be791e75410..0ddb650fe2c6 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/OracleTestDataComparator.java @@ -1,25 +1,29 @@ +/* + * 
Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.oracle; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; - import java.util.ArrayList; import java.util.List; public class OracleTestDataComparator extends AdvancedTestDataComparator { - private final ExtendedNameTransformer namingResolver = new OracleNameTransformer(); + private final ExtendedNameTransformer namingResolver = new OracleNameTransformer(); - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - result.add(resolved.toUpperCase()); - } - return result; + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); + if (!resolved.startsWith("\"")) { + result.add(resolved.toLowerCase()); + result.add(resolved.toUpperCase()); } + return result; + } + } diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java index ee48ebcbcc74..133a44263c25 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/SshOracleDestinationAcceptanceTest.java @@ -17,13 +17,11 @@ import io.airbyte.integrations.base.ssh.SshTunnel; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.testcontainers.containers.Network; public abstract class SshOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { diff --git a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java index fd404bee7955..1342c57dafd3 100644 --- a/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-oracle/src/test-integration/java/io/airbyte/integrations/destination/oracle/UnencryptedOracleDestinationAcceptanceTest.java @@ -17,13 +17,10 @@ import io.airbyte.db.jdbc.JdbcUtils; import 
io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.sql.SQLException; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; -import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import org.junit.Test; public class UnencryptedOracleDestinationAcceptanceTest extends DestinationAcceptanceTest { From eea6d1a95ed5d83715b96140a982c3439948a1a9 Mon Sep 17 00:00:00 2001 From: Serhii Lazebnyi <53845333+lazebnyi@users.noreply.github.com> Date: Wed, 27 Apr 2022 17:42:01 +0300 Subject: [PATCH 22/39] Source Instagram: Deleted read_insights scope from OAuth (#12344) * Deleted read_insights scope from oauth * Deleted read_insights scope from test --- .../io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java | 2 +- .../io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java index c330b42980a6..48f72b58144a 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlow.java @@ -12,7 +12,7 @@ // Instagram Graph API require Facebook API User token public class InstagramOAuthFlow extends FacebookMarketingOAuthFlow { - private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights,read_insights"; + private static final String SCOPES = "ads_management,instagram_basic,instagram_manage_insights"; public InstagramOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { super(configRepository, httpClient); diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java index f4ed295a2300..31cb39bc935f 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/facebook/InstagramOAuthFlowTest.java @@ -20,7 +20,7 @@ protected BaseOAuthFlow getOAuthFlow() { @Override protected String getExpectedConsentUrl() { - return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights%2Cread_insights"; + return "https://www.facebook.com/v12.0/dialog/oauth?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scope=ads_management%2Cinstagram_basic%2Cinstagram_manage_insights"; } @Override From 3ece0c4774fdbf57ed472dc0e8c5ea945b5e60f8 Mon Sep 17 00:00:00 2001 From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com> Date: Wed, 27 Apr 2022 10:43:46 -0400 Subject: [PATCH 23/39] Replace DeleteModal with Confirmation Modal (#12275) When delete is confirmed, navigate away from route --- .../ConfirmationModal/ConfirmationModal.tsx | 4 +- .../components/DeleteBlock/DeleteBlock.tsx | 25 +++++++-- .../DeleteBlock/components/DeleteModal.tsx | 52 ------------------- .../ConfirmationModalService.tsx 
| 1 + 4 files changed, 24 insertions(+), 58 deletions(-) delete mode 100644 airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx diff --git a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx index 2747f956b9aa..5bc370088960 100644 --- a/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx +++ b/airbyte-webapp/src/components/ConfirmationModal/ConfirmationModal.tsx @@ -29,6 +29,7 @@ export interface ConfirmationModalProps { text: string; submitButtonText: string; onSubmit: () => void; + submitButtonDataId?: string; } export const ConfirmationModal: React.FC = ({ @@ -37,6 +38,7 @@ export const ConfirmationModal: React.FC = ({ text, onSubmit, submitButtonText, + submitButtonDataId, }) => ( }> @@ -45,7 +47,7 @@ export const ConfirmationModal: React.FC = ({ - diff --git a/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx b/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx index 4769a2fe519f..680cac1f929e 100644 --- a/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx +++ b/airbyte-webapp/src/components/DeleteBlock/DeleteBlock.tsx @@ -1,11 +1,12 @@ -import React, { useState } from "react"; +import React, { useCallback } from "react"; import { FormattedMessage } from "react-intl"; import styled from "styled-components"; import { Button, H5 } from "components"; import ContentCard from "components/ContentCard"; -import DeleteModal from "./components/DeleteModal"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; +import useRouter from "hooks/useRouter"; type IProps = { type: "source" | "destination" | "connection"; @@ -29,7 +30,22 @@ const Text = styled.div` `; const DeleteBlock: React.FC = ({ type, onDelete }) => { - const [isModalOpen, setIsModalOpen] = useState(false); + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); + const { push } = useRouter(); + + const onDeleteButtonClick = useCallback(() => { + openConfirmationModal({ + text: `tables.${type}DeleteModalText`, + title: `tables.${type}DeleteConfirm`, + submitButtonText: "form.delete", + onSubmit: async () => { + await onDelete(); + closeConfirmationModal(); + push("../.."); + }, + submitButtonDataId: "delete", + }); + }, [closeConfirmationModal, onDelete, openConfirmationModal, push, type]); return ( <> @@ -40,11 +56,10 @@ const DeleteBlock: React.FC = ({ type, onDelete }) => { - - {isModalOpen && setIsModalOpen(false)} onSubmit={onDelete} />} ); }; diff --git a/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx b/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx deleted file mode 100644 index a8a4f36859ba..000000000000 --- a/airbyte-webapp/src/components/DeleteBlock/components/DeleteModal.tsx +++ /dev/null @@ -1,52 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; -import { useMutation } from "react-query"; -import styled from "styled-components"; - -import { Button, LoadingButton } from "components"; -import Modal from "components/Modal"; -export type IProps = { - onClose: () => void; - onSubmit: () => Promise; - type: "source" | "destination" | "connection"; -}; - -const Content = styled.div` - width: 585px; - font-size: 14px; - line-height: 28px; - padding: 10px 40px 15px 37px; - white-space: pre-line; -`; - -const ButtonContent = styled.div` - padding-top: 28px; - display: flex; - justify-content: flex-end; -`; - -const ButtonWithMargin = 
styled(Button)` - margin-right: 12px; -`; - -const DeleteModal: React.FC = ({ onClose, onSubmit, type }) => { - const { isLoading, mutateAsync } = useMutation(() => onSubmit()); - - return ( - }> - - - - - - - mutateAsync()} data-id="delete"> - - - - - - ); -}; - -export default DeleteModal; diff --git a/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx b/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx index 6b4b18ceba19..089effe26b5f 100644 --- a/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx +++ b/airbyte-webapp/src/hooks/services/ConfirmationModal/ConfirmationModalService.tsx @@ -64,6 +64,7 @@ export const ConfirmationModalService = ({ children }: { children: React.ReactNo text={state.confirmationModal.text} onSubmit={state.confirmationModal.onSubmit} submitButtonText={state.confirmationModal.submitButtonText} + submitButtonDataId={state.confirmationModal.submitButtonDataId} /> ) : null} From 7e1d95c783406cc2fff3ce3f767862933a094529 Mon Sep 17 00:00:00 2001 From: George Claireaux Date: Wed, 27 Apr 2022 16:18:22 +0100 Subject: [PATCH 24/39] =?UTF-8?q?=F0=9F=A4=96=20publish=20command:=20fix?= =?UTF-8?q?=20for=20version=20bumping=20multiple=20connectors=20in=20same?= =?UTF-8?q?=20PR=20=20(#12397)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add a git pull from origin master before commit auto changes * dummy bump 2 connectors * pull from current branch not master * auto-bump connector version * dummy bump 2 connectors * revert faker bump * auto-bump connector version * better descript in changelog * dummy bump... again * commit first, then pull merge, then push * auto-bump connector version * auto-bump connector version Co-authored-by: Octavia Squidington III --- .github/workflows/publish-command.yml | 1 + .../init/src/main/resources/seed/source_definitions.yaml | 4 ++-- airbyte-config/init/src/main/resources/seed/source_specs.yaml | 4 ++-- .../connectors/source-apify-dataset/Dockerfile | 2 +- airbyte-integrations/connectors/source-openweather/Dockerfile | 2 +- docs/integrations/sources/apify-dataset.md | 1 + docs/integrations/sources/openweather.md | 1 + 7 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index b7c0aaa17519..e1590d173a25 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -228,6 +228,7 @@ jobs: run: | git add -u git commit -m "auto-bump connector version" + git pull origin ${{ github.event.inputs.gitref }} git push origin ${{ github.event.inputs.gitref }} - name: Add Version Bump Success Comment if: github.event.inputs.comment-id && github.event.inputs.auto-bump-version == 'true' && success() diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index a021f9fce2f3..63dae61c6306 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -42,7 +42,7 @@ - name: Apify Dataset sourceDefinitionId: 47f17145-fe20-4ef5-a548-e29b048adf84 dockerRepository: airbyte/source-apify-dataset - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/apify-dataset icon: apify.svg sourceType: api @@ -529,7 +529,7 @@ - name: OpenWeather sourceDefinitionId: 
d8540a80-6120-485d-b7d6-272bca477d9b dockerRepository: airbyte/source-openweather - dockerImageTag: 0.1.1 + dockerImageTag: 0.1.4 documentationUrl: https://docs.airbyte.io/integrations/sources/openweather sourceType: api - name: Oracle DB diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index d69af5d6f599..0bb0de7c5594 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -511,7 +511,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-apify-dataset:0.1.9" +- dockerImage: "airbyte/source-apify-dataset:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/apify-dataset" connectionSpecification: @@ -5637,7 +5637,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-openweather:0.1.1" +- dockerImage: "airbyte/source-openweather:0.1.4" spec: documentationUrl: "https://docsurl.com" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-apify-dataset/Dockerfile b/airbyte-integrations/connectors/source-apify-dataset/Dockerfile index fbf31c680e2f..3c25c0ce7cbd 100644 --- a/airbyte-integrations/connectors/source-apify-dataset/Dockerfile +++ b/airbyte-integrations/connectors/source-apify-dataset/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-apify-dataset diff --git a/airbyte-integrations/connectors/source-openweather/Dockerfile b/airbyte-integrations/connectors/source-openweather/Dockerfile index 8b82589d62f2..b344b066bd47 100644 --- a/airbyte-integrations/connectors/source-openweather/Dockerfile +++ b/airbyte-integrations/connectors/source-openweather/Dockerfile @@ -34,5 +34,5 @@ COPY source_openweather ./source_openweather ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.1 +LABEL io.airbyte.version=0.1.4 LABEL io.airbyte.name=airbyte/source-openweather diff --git a/docs/integrations/sources/apify-dataset.md b/docs/integrations/sources/apify-dataset.md index dbd4473494ad..1390e4e36339 100644 --- a/docs/integrations/sources/apify-dataset.md +++ b/docs/integrations/sources/apify-dataset.md @@ -43,6 +43,7 @@ The Apify dataset connector uses [Apify Python Client](https://docs.apify.com/ap | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.11 | 2022-04-27 | [12397](https://github.com/airbytehq/airbyte/pull/12397) | No changes. Used connector to test publish workflow changes. | | 0.1.9 | 2022-04-05 | [PR\#11712](https://github.com/airbytehq/airbyte/pull/11712) | No changes from 0.1.4. Used connector to test publish workflow changes. 
| | 0.1.4 | 2021-12-23 | [PR\#8434](https://github.com/airbytehq/airbyte/pull/8434) | Update fields in source-connectors specifications | | 0.1.2 | 2021-11-08 | [PR\#7499](https://github.com/airbytehq/airbyte/pull/7499) | Remove base-python dependencies | diff --git a/docs/integrations/sources/openweather.md b/docs/integrations/sources/openweather.md index dba899a2e39d..8609d916597b 100644 --- a/docs/integrations/sources/openweather.md +++ b/docs/integrations/sources/openweather.md @@ -34,5 +34,6 @@ The free plan allows 60 calls per minute and 1,000,000 calls per month, you won' | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| 0.1.4 | 2022-04-27 | [12397](https://github.com/airbytehq/airbyte/pull/12397) | No changes. Used connector to test publish workflow changes. | | 0.1.0 | 2021-10-27 | [7434](https://github.com/airbytehq/airbyte/pull/7434) | Initial release | From 1413aca478badc861a1ac580d79ff24503f432d3 Mon Sep 17 00:00:00 2001 From: Topher Lubaway Date: Wed, 27 Apr 2022 10:26:35 -0500 Subject: [PATCH 25/39] Toph deploy docs ssh repo check (#12323) * Check for ssh github repo because assumptions mostly. more clear fail messages for an unsupported workflow also removes the pipeline which now is redundant with local workflow * WIP1 * working test * More clear git test * less weird characters for grep * remove verbose for excho --- .github/workflows/documentation.yml | 37 ----------------------------- tools/bin/deploy_docusaurus | 11 +++++++++ 2 files changed, 11 insertions(+), 37 deletions(-) delete mode 100644 .github/workflows/documentation.yml diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml deleted file mode 100644 index 793172b2911a..000000000000 --- a/.github/workflows/documentation.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: compile-docusaurus-static-assets - -on: - push: - branches: [master] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - deploy-docusaurus-to-docs-airbyte-io: - runs-on: ubuntu-latest - steps: - # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - name: Check out repo - # `uses` taps GH ORG/REPO@version. - # "actions" is a default org for some common GH actions - uses: actions/checkout@v3 - with: - fetch-depth: 0 - # Node is required for yarn - - name: Set up Yarn - uses: actions/setup-node@v2 - with: - node-version: '16.13.0' - cache: 'yarn' - cache-dependency-path: docusaurus - # # Build Docusaurus website - # - name: Check for docusaurus changes not committed - # run: ./tools/bin/check_docusaurus_build_changes - # # Install and build Docusaurus website - # - name: Deploy docs to production (it's weird) - # run: ./tools/bin/deploy_docusaurus - # env: - # GITHUB_TOKEN: ${{ secrets.OCTAVIA_PAT }} - diff --git a/tools/bin/deploy_docusaurus b/tools/bin/deploy_docusaurus index 2f74f66cac9c..fb4c206b15d8 100755 --- a/tools/bin/deploy_docusaurus +++ b/tools/bin/deploy_docusaurus @@ -17,6 +17,17 @@ else exit 1 fi +# if a string +if $(git remote get-url origin | grep --quiet "http"); then + set +o xtrace + echo -e "$red_text""This program requires a ssh-based github repo""$default_text" + echo -e "$red_text""https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account""$default_text" + echo -e "$red_text""You will need to change your remote to continue. 
Yell @topher for help""$default_text" + echo -e "$red_text""change your remote command:""$default_text" + echo -e "$red_text""git remote set-url origin git@github.com:airbytehq/airbyte.git""$default_text" + exit 1 +fi + # ------------- Start Main set +o xtrace From 45a212c4c5824cdbb9f4e673895cc56709009f26 Mon Sep 17 00:00:00 2001 From: Yurii Bidiuk <35812734+yurii-bidiuk@users.noreply.github.com> Date: Wed, 27 Apr 2022 18:52:33 +0300 Subject: [PATCH 26/39] =?UTF-8?q?=F0=9F=8E=89=20=20Source=20Snowflake:=20s?= =?UTF-8?q?upport=20oauth=20=20(#10953)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add oauth flow to source-snowflake * Add unit test for oauth flow * add docs to method * format code * change configs * fixed remarks * fixed comments * fixed PR remark * update with master * format code * fix PR remmakrs * add test for backward compatibility * bump version * small fix for test * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 149 +++++++++++-- .../source/AbstractSourceConnectorTest.java | 7 + .../source/jdbc/AbstractJdbcSource.java | 2 +- .../connectors/source-snowflake/Dockerfile | 2 +- .../connectors/source-snowflake/README.md | 24 ++- .../connectors/source-snowflake/build.gradle | 1 + .../SnowflakeDataSourceUtils.java | 197 ++++++++++++++++++ .../SnowflakeSource.java | 88 +++++--- .../src/main/resources/spec.json | 170 ++++++++++++--- .../SnowflakeJdbcSourceAcceptanceTest.java | 20 +- .../SnowflakeSourceAcceptanceTest.java | 46 ++-- .../SnowflakeSourceAuthAcceptanceTest.java | 94 +++++++++ .../sources/SnowflakeSourceDatatypeTest.java | 4 +- .../SnowflakeDataSourceUtilsTest.java | 55 +++++ .../oauth/OAuthImplementationFactory.java | 1 + .../oauth/flows/SourceSnowflakeOAuthFlow.java | 144 +++++++++++++ .../oauth/flows/SnowflakeOAuthFlowTest.java | 82 ++++++++ docs/integrations/sources/snowflake.md | 28 +++ 19 files changed, 1014 insertions(+), 102 deletions(-) create mode 100644 airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java create mode 100644 airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java create mode 100644 airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java create mode 100644 airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java create mode 100644 airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 63dae61c6306..3ec994c85491 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -751,7 +751,7 @@ - name: Snowflake sourceDefinitionId: e2d65910-8c8b-40a1-ae7d-ee2416b2bfa2 dockerRepository: airbyte/source-snowflake - dockerImageTag: 0.1.10 + dockerImageTag: 0.1.11 documentationUrl: https://docs.airbyte.io/integrations/sources/snowflake icon: snowflake.svg sourceType: database diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml 
index 0bb0de7c5594..333e61f9bada 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -7955,7 +7955,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-snowflake:0.1.10" +- dockerImage: "airbyte/source-snowflake:0.1.11" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/snowflake" connectionSpecification: @@ -7968,10 +7968,77 @@ - "warehouse" - "database" - "schema" - - "username" - - "password" - additionalProperties: false + additionalProperties: true properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + properties: + auth_type: + type: "string" + const: "OAuth" + default: "OAuth" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 1 + properties: + auth_type: + type: "string" + const: "username/password" + default: "username/password" + order: 0 + username: + description: "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + order: 0 host: description: "The host domain of the snowflake instance (must include the\ \ account, region, cloud environment, and end with snowflakecomputing.com)." @@ -7979,58 +8046,96 @@ - "accountname.us-east-2.aws.snowflakecomputing.com" type: "string" title: "Account Name" - order: 0 + order: 1 role: description: "The role you created for Airbyte to access Snowflake." examples: - "AIRBYTE_ROLE" type: "string" title: "Role" - order: 1 + order: 2 warehouse: description: "The warehouse you created for Airbyte to access data." examples: - "AIRBYTE_WAREHOUSE" type: "string" title: "Warehouse" - order: 2 + order: 3 database: description: "The database you created for Airbyte to access data." examples: - "AIRBYTE_DATABASE" type: "string" title: "Database" - order: 3 + order: 4 schema: description: "The source Snowflake schema tables." examples: - "AIRBYTE_SCHEMA" type: "string" title: "Schema" - order: 4 - username: - description: "The username you created to allow Airbyte to access the database." - examples: - - "AIRBYTE_USER" - type: "string" - title: "Username" order: 5 - password: - description: "The password associated with the username." - type: "string" - airbyte_secret: true - title: "Password" - order: 6 jdbc_url_params: description: "Additional properties to pass to the JDBC URL string when\ \ connecting to the database formatted as 'key=value' pairs separated\ \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." title: "JDBC URL Params" type: "string" - order: 7 + order: 6 supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] + advanced_auth: + auth_flow_type: "oauth2.0" + predicate_key: + - "credentials" + - "auth_type" + predicate_value: "OAuth" + oauth_config_specification: + oauth_user_input_from_connector_config_specification: + type: "object" + additionalProperties: false + properties: + host: + type: "string" + path_in_connector_config: + - "host" + complete_oauth_output_specification: + type: "object" + additionalProperties: false + properties: + access_token: + type: "string" + path_in_connector_config: + - "credentials" + - "access_token" + refresh_token: + type: "string" + path_in_connector_config: + - "credentials" + - "refresh_token" + complete_oauth_server_input_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + client_secret: + type: "string" + complete_oauth_server_output_specification: + type: "object" + additionalProperties: false + properties: + client_id: + type: "string" + path_in_connector_config: + - "credentials" + - "client_id" + client_secret: + type: "string" + path_in_connector_config: + - "credentials" + - "client_secret" - dockerImage: "airbyte/source-square:0.1.4" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/square" diff --git a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java index 02aeaacb3fde..9bc01f2f7208 100644 --- a/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java +++ b/airbyte-integrations/bases/standard-source-test/src/main/java/io/airbyte/integrations/standardtest/source/AbstractSourceConnectorTest.java @@ -140,6 +140,13 @@ protected StandardCheckConnectionOutput runCheck() throws Exception { .run(new StandardCheckConnectionInput().withConnectionConfiguration(getConfig()), jobRoot); } + protected String runCheckAndGetStatusAsString(JsonNode config) throws Exception { + return new DefaultCheckConnectionWorker( + workerConfigs, + new AirbyteIntegrationLauncher(JOB_ID, JOB_ATTEMPT, getImageName(), processFactory, workerConfigs.getResourceRequirements())) + .run(new StandardCheckConnectionInput().withConnectionConfiguration(config), jobRoot).getStatus().toString(); + } + protected AirbyteCatalog runDiscover() throws Exception { return new DefaultDiscoverCatalogWorker( workerConfigs, diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index f85b4eebc0c5..1d6c237a3659 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -289,7 +289,7 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { final JsonNode jdbcConfig = toDatabaseConfig(config); final JdbcDatabase database = Databases.createStreamingJdbcDatabase( - jdbcConfig.get("username").asText(), + 
jdbcConfig.has("username") ? jdbcConfig.get("username").asText() : null, jdbcConfig.has("password") ? jdbcConfig.get("password").asText() : null, jdbcConfig.get("jdbc_url").asText(), driverClass, diff --git a/airbyte-integrations/connectors/source-snowflake/Dockerfile b/airbyte-integrations/connectors/source-snowflake/Dockerfile index 2756403f5960..f2d1461977b5 100644 --- a/airbyte-integrations/connectors/source-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/source-snowflake/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-snowflake COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-snowflake diff --git a/airbyte-integrations/connectors/source-snowflake/README.md b/airbyte-integrations/connectors/source-snowflake/README.md index 124f7a6c5ccf..759a0a649388 100644 --- a/airbyte-integrations/connectors/source-snowflake/README.md +++ b/airbyte-integrations/connectors/source-snowflake/README.md @@ -13,10 +13,28 @@ "warehouse": "AIRBYTE_WAREHOUSE", "database": "AIRBYTE_DATABASE", "schema": "AIRBYTE_SCHEMA", - "username": "AIRBYTE_USER", - "password": "SOMEPASSWORD" + "credentails" { + "auth_type": "username/password", + "username": "AIRBYTE_USER", + "password": "SOMEPASSWORD" + } +} +``` +3. Create a file at `secrets/config_auth.json` with the following format: +``` +{ + "host": "ACCOUNT.REGION.PROVIDER.snowflakecomputing.com", + "role": "AIRBYTE_ROLE", + "warehouse": "AIRBYTE_WAREHOUSE", + "database": "AIRBYTE_DATABASE", + "schema": "AIRBYTE_SCHEMA", + "credentails" { + "auth_type": "OAuth", + "client_id": "client_id", + "client_secret": "client_secret", + "refresh_token": "refresh_token" + } } ``` - ## For Airbyte employees Put the contents of the `Snowflake Insert Test Creds` secret on Lastpass into `secrets/config.json` to be able to run integration tests locally. diff --git a/airbyte-integrations/connectors/source-snowflake/build.gradle b/airbyte-integrations/connectors/source-snowflake/build.gradle index 84f73f77e172..c641b62056d6 100644 --- a/airbyte-integrations/connectors/source-snowflake/build.gradle +++ b/airbyte-integrations/connectors/source-snowflake/build.gradle @@ -17,6 +17,7 @@ dependencies { implementation project(':airbyte-protocol:models') implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) implementation group: 'net.snowflake', name: 'snowflake-jdbc', version: '3.13.9' + implementation 'com.zaxxer:HikariCP:5.0.1' testImplementation testFixtures(project(':airbyte-integrations:connectors:source-jdbc')) testImplementation project(':airbyte-test-utils') diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java new file mode 100644 index 000000000000..9d0351943116 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java
new file mode 100644
index 000000000000..9d0351943116
--- /dev/null
+++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.integrations.source.snowflake;
+
+import static java.util.stream.Collectors.joining;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.zaxxer.hikari.HikariConfig;
+import com.zaxxer.hikari.HikariDataSource;
+import io.airbyte.commons.json.Jsons;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URLEncoder;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpRequest.BodyPublisher;
+import java.net.http.HttpRequest.BodyPublishers;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+import java.time.Duration;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class SnowflakeDataSourceUtils {
+
+  public static final String OAUTH_METHOD = "OAuth";
+  public static final String USERNAME_PASSWORD_METHOD = "username/password";
+  public static final String UNRECOGNIZED = "Unrecognized";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDataSourceUtils.class);
+  private static final int PAUSE_BETWEEN_TOKEN_REFRESH_MIN = 7; // snowflake access token's TTL is 10min and can't be modified
+  private static final String REFRESH_TOKEN_URL = "https://%s/oauth/token-request";
+  private static final HttpClient httpClient = HttpClient.newBuilder()
+      .version(HttpClient.Version.HTTP_2)
+      .connectTimeout(Duration.ofSeconds(10))
+      .build();
+
+  /**
+   * A Snowflake OAuth access token expires in 10 minutes. For syncs that run longer than that, the
+   * 'token' property must be refreshed after the connection pool has started; HikariDataSource
+   * supports updating data source properties at runtime.
+   *
+   * @param config source config JSON
+   * @return datasource
+   */
+  public static HikariDataSource createDataSource(final JsonNode config) {
+    HikariDataSource dataSource = new HikariDataSource();
+    dataSource.setJdbcUrl(buildJDBCUrl(config));
+
+    if (config.has("credentials")) {
+      JsonNode credentials = config.get("credentials");
+      final String authType = credentials.has("auth_type") ? credentials.get("auth_type").asText() : UNRECOGNIZED;
+      switch (authType) {
+        case OAUTH_METHOD -> {
+          LOGGER.info("Authorization mode is OAuth");
+          dataSource.setDataSourceProperties(buildAuthProperties(config));
+          // scheduled task to keep the access token up to date
+          SnowflakeSource.SCHEDULED_EXECUTOR_SERVICE.scheduleAtFixedRate(
+              getAccessTokenTask(dataSource),
+              PAUSE_BETWEEN_TOKEN_REFRESH_MIN, PAUSE_BETWEEN_TOKEN_REFRESH_MIN, TimeUnit.MINUTES);
+        }
+        case USERNAME_PASSWORD_METHOD -> {
+          LOGGER.info("Authorization mode is 'Username and password'");
+          populateUsernamePasswordConfig(dataSource, config.get("credentials"));
+        }
+        default -> throw new IllegalArgumentException("Unrecognized auth type: " + authType);
+      }
+    } else {
+      LOGGER.info("Authorization mode is the deprecated 'Username and password'. Please update your source configuration");
+      populateUsernamePasswordConfig(dataSource, config);
+    }
+
+    return dataSource;
+  }
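To make the token lifecycle above concrete, here is a minimal usage sketch. This is hypothetical driver code, not part of the patch; the placeholder config follows the OAuth branch of the new spec:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.zaxxer.hikari.HikariDataSource;
import io.airbyte.commons.json.Jsons;
import io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils;
import java.sql.Connection;

public class SnowflakeOAuthPoolSketch {

  public static void main(final String[] args) throws Exception {
    // Placeholder values; the shape mirrors the OAuth branch of the connector spec.
    final JsonNode config = Jsons.deserialize("""
        {"host": "accountname.us-east-2.aws.snowflakecomputing.com",
         "role": "AIRBYTE_ROLE", "warehouse": "AIRBYTE_WAREHOUSE",
         "database": "AIRBYTE_DATABASE", "schema": "AIRBYTE_SCHEMA",
         "credentials": {"auth_type": "OAuth", "client_id": "...",
           "client_secret": "...", "refresh_token": "..."}}
        """);
    try (HikariDataSource dataSource = SnowflakeDataSourceUtils.createDataSource(config);
         Connection connection = dataSource.getConnection()) {
      // Connections drawn from the pool carry an OAuth token that the scheduled
      // task registered above refreshes every 7 minutes, inside the 10-minute TTL.
    }
  }

}
```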
+
+  /**
+   * Requests a new access token from Snowflake using the refresh token and client credentials.
+   *
+   * @return access token
+   */
+  public static String getAccessTokenUsingRefreshToken(final String hostName,
+                                                       final String clientId,
+                                                       final String clientSecret,
+                                                       final String refreshToken)
+      throws IOException {
+    final var refreshTokenUri = String.format(REFRESH_TOKEN_URL, hostName);
+    final Map<String, String> requestBody = new HashMap<>();
+    requestBody.put("grant_type", "refresh_token");
+    requestBody.put("refresh_token", refreshToken);
+
+    try {
+      final BodyPublisher bodyPublisher = BodyPublishers.ofString(requestBody.keySet().stream()
+          .map(key -> key + "=" + URLEncoder.encode(requestBody.get(key), StandardCharsets.UTF_8))
+          .collect(joining("&")));
+
+      final byte[] authorization = Base64.getEncoder()
+          .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8));
+
+      final HttpRequest request = HttpRequest.newBuilder()
+          .POST(bodyPublisher)
+          .uri(URI.create(refreshTokenUri))
+          .header("Content-Type", "application/x-www-form-urlencoded")
+          .header("Accept", "application/json")
+          .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8))
+          .build();
+
+      final HttpResponse<String> response = httpClient.send(request,
+          HttpResponse.BodyHandlers.ofString());
+
+      final JsonNode jsonResponse = Jsons.deserialize(response.body());
+      if (jsonResponse.has("access_token")) {
+        return jsonResponse.get("access_token").asText();
+      } else {
+        LOGGER.error("Failed to obtain accessToken using refresh token. " + jsonResponse);
+        throw new RuntimeException(
+            "Failed to obtain accessToken using refresh token.");
+      }
+    } catch (final InterruptedException e) {
+      throw new IOException("Failed to refresh token", e);
+    }
+  }
+
+  public static String buildJDBCUrl(JsonNode config) {
+    final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?",
+        config.get("host").asText()));
+
+    // Add required properties
+    jdbcUrl.append(String.format(
+        "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s",
+        config.get("role").asText(),
+        config.get("warehouse").asText(),
+        config.get("database").asText(),
+        config.get("schema").asText(),
+        // Needed for JDK17 - see
+        // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow
+        "JSON",
+        true));
+
+    // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string
+    if (config.has("jdbc_url_params")) {
+      jdbcUrl.append("&").append(config.get("jdbc_url_params").asText());
+    }
+    return jdbcUrl.toString();
+  }
+
+  private static Runnable getAccessTokenTask(final HikariDataSource dataSource) {
+    return () -> {
+      LOGGER.info("Refresh token process started");
+      var props = dataSource.getDataSourceProperties();
+      try {
+        var token = getAccessTokenUsingRefreshToken(props.getProperty("host"),
+            props.getProperty("client_id"), props.getProperty("client_secret"),
+            props.getProperty("refresh_token"));
+        props.setProperty("token", token);
+        dataSource.setDataSourceProperties(props);
+        LOGGER.info("New access token has been obtained");
+      } catch (IOException e) {
+        LOGGER.error("Failed to obtain a fresh accessToken: " + e);
+      }
+    };
+  }
+
+  public static Properties buildAuthProperties(JsonNode config) {
+    Properties properties = new Properties();
+    try {
+      var credentials = config.get("credentials");
+      properties.setProperty("client_id", credentials.get("client_id").asText());
+      properties.setProperty("client_secret", credentials.get("client_secret").asText());
+      properties.setProperty("refresh_token", credentials.get("refresh_token").asText());
+      properties.setProperty("host", config.get("host").asText());
+      properties.put("authenticator", "oauth");
+      properties.put("account", config.get("host").asText());
+
+      String accessToken = getAccessTokenUsingRefreshToken(
+          config.get("host").asText(), credentials.get("client_id").asText(),
+          credentials.get("client_secret").asText(), credentials.get("refresh_token").asText());
+
+      properties.put("token", accessToken);
+    } catch (IOException e) {
+      LOGGER.error("Failed to obtain the access token: " + e.getMessage());
+    }
+    return properties;
+  }
+
+  private static void populateUsernamePasswordConfig(HikariConfig hikariConfig, JsonNode config) {
+    hikariConfig.setUsername(config.get("username").asText());
+    hikariConfig.setPassword(config.get("password").asText());
+  }
+
+}
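For reference, this is the URL shape `buildJDBCUrl` emits for a typical config; the unit test added later in this patch asserts exactly this string. The fragment below assumes the same imports as the class above:

```java
// Sketch of buildJDBCUrl's output for a minimal config.
final JsonNode config = Jsons.deserialize(
    "{\"host\": \"host\", \"role\": \"role\", \"schema\": \"SOURCE_SCHEMA\","
        + " \"database\": \"DATABASE\", \"warehouse\": \"WAREHOUSE\"}");
final String url = SnowflakeDataSourceUtils.buildJDBCUrl(config);
// url is:
// jdbc:snowflake://host/?role=role&warehouse=WAREHOUSE&database=DATABASE&schema=SOURCE_SCHEMA&JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true
```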
properties.setProperty("refresh_token", credentials.get("refresh_token").asText()); + properties.setProperty("host", config.get("host").asText()); + properties.put("authenticator", "oauth"); + properties.put("account", config.get("host").asText()); + + String accessToken = getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + + properties.put("token", accessToken); + } catch (IOException e) { + LOGGER.error("Request access token was failed with error" + e.getMessage()); + } + return properties; + } + + private static void populateUsernamePasswordConfig(HikariConfig hikariConfig, JsonNode config) { + hikariConfig.setUsername(config.get("username").asText()); + hikariConfig.setPassword(config.get("password").asText()); + } + +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java index b404e4fc3b3b..33fe4f434671 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java @@ -4,14 +4,25 @@ package io.airbyte.integrations.source.snowflake; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.OAUTH_METHOD; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.UNRECOGNIZED; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.USERNAME_PASSWORD_METHOD; + import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcDatabase; +import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; +import java.io.IOException; import java.sql.JDBCType; +import java.sql.SQLException; import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -19,54 +30,79 @@ public class SnowflakeSource extends AbstractJdbcSource implements Sou private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeSource.class); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; + public static final ScheduledExecutorService SCHEDULED_EXECUTOR_SERVICE = Executors.newScheduledThreadPool(1); public SnowflakeSource() { - super(DRIVER_CLASS, new SnowflakeJdbcStreamingQueryConfiguration(), new SnowflakeSourceOperations()); + super(DRIVER_CLASS, new SnowflakeJdbcStreamingQueryConfiguration(), + new SnowflakeSourceOperations()); } public static void main(final String[] args) throws Exception { final Source source = new SnowflakeSource(); LOGGER.info("starting source: {}", SnowflakeSource.class); new IntegrationRunner(source).run(args); + SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); LOGGER.info("completed source: {}", SnowflakeSource.class); } + @Override + public JdbcDatabase createDatabase(JsonNode config) throws SQLException { + final DataSource dataSource = 
SnowflakeDataSourceUtils.createDataSource(config); + var database = new StreamingJdbcDatabase(dataSource, new SnowflakeSourceOperations(), + new SnowflakeJdbcStreamingQueryConfiguration()); + quoteString = database.getMetaData().getIdentifierQuoteString(); + return database; + } + @Override public JsonNode toDatabaseConfig(final JsonNode config) { + final String jdbcUrl = SnowflakeDataSourceUtils.buildJDBCUrl(config); - final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", - config.get("host").asText())); + if (config.has("credentials")) { + JsonNode credentials = config.get("credentials"); + final String authType = + credentials.has("auth_type") ? credentials.get("auth_type").asText() : UNRECOGNIZED; + return switch (authType) { + case OAUTH_METHOD -> buildOAuthConfig(config, jdbcUrl); + case USERNAME_PASSWORD_METHOD -> buildUsernamePasswordConfig(config.get("credentials"), + jdbcUrl); + default -> throw new IllegalArgumentException("Unrecognized auth type: " + authType); + }; + } else { + return buildUsernamePasswordConfig(config, jdbcUrl); + } + } - // Add required properties - jdbcUrl.append(String.format("role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s", - config.get("role").asText(), - config.get("warehouse").asText(), - config.get("database").asText(), - config.get("schema").asText(), - // Needed for JDK17 - see - // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow - "JSON", - true)); + @Override + public Set getExcludedInternalNameSpaces() { + return Set.of( + "INFORMATION_SCHEMA"); + } - // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string - if (config.has("jdbc_url_params")) { - jdbcUrl.append("&").append(config.get("jdbc_url_params").asText()); + private JsonNode buildOAuthConfig(JsonNode config, String jdbcUrl) { + final String accessToken; + var credentials = config.get("credentials"); + try { + accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken( + config.get("host").asText(), credentials.get("client_id").asText(), + credentials.get("client_secret").asText(), credentials.get("refresh_token").asText()); + } catch (IOException e) { + throw new RuntimeException(e); } + final ImmutableMap.Builder configBuilder = ImmutableMap.builder() + .put("connection_properties", + String.join(";", "authenticator=oauth", "token=" + accessToken)) + .put("jdbc_url", jdbcUrl); + return Jsons.jsonNode(configBuilder.build()); + } - LOGGER.info(jdbcUrl.toString()); - + private JsonNode buildUsernamePasswordConfig(JsonNode config, String jdbcUrl) { final ImmutableMap.Builder configBuilder = ImmutableMap.builder() .put("username", config.get("username").asText()) .put("password", config.get("password").asText()) - .put("jdbc_url", jdbcUrl.toString()); - + .put("jdbc_url", jdbcUrl); + LOGGER.info(jdbcUrl); return Jsons.jsonNode(configBuilder.build()); } - @Override - public Set getExcludedInternalNameSpaces() { - return Set.of( - "INFORMATION_SCHEMA"); - } - } diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json index 95b989811537..689926366c68 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json @@ -4,71 +4,183 @@ "$schema": 
"http://json-schema.org/draft-07/schema#", "title": "Snowflake Source Spec", "type": "object", - "required": [ - "host", - "role", - "warehouse", - "database", - "schema", - "username", - "password" - ], - "additionalProperties": false, + "required": ["host", "role", "warehouse", "database", "schema"], + "additionalProperties": true, "properties": { + "credentials": { + "title": "Authorization Method", + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "OAuth2.0", + "order": 0, + "required": ["client_id", "client_secret", "auth_type"], + "properties": { + "auth_type": { + "type": "string", + "const": "OAuth", + "default": "OAuth", + "order": 0 + }, + "client_id": { + "type": "string", + "title": "Client ID", + "description": "The Client ID of your Snowflake developer application.", + "airbyte_secret": true, + "order": 1 + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "The Client Secret of your Snowflake developer application.", + "airbyte_secret": true, + "order": 2 + }, + "access_token": { + "type": "string", + "title": "Access Token", + "description": "Access Token for making authenticated requests.", + "airbyte_secret": true, + "order": 3 + }, + "refresh_token": { + "type": "string", + "title": "Refresh Token", + "description": "Refresh Token for making authenticated requests.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Username and Password", + "type": "object", + "required": ["username", "password", "auth_type"], + "order": 1, + "properties": { + "auth_type": { + "type": "string", + "const": "username/password", + "default": "username/password", + "order": 0 + }, + "username": { + "description": "The username you created to allow Airbyte to access the database.", + "examples": ["AIRBYTE_USER"], + "type": "string", + "title": "Username", + "order": 1 + }, + "password": { + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "title": "Password", + "order": 2 + } + } + } + ], + "order": 0 + }, "host": { "description": "The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).", "examples": ["accountname.us-east-2.aws.snowflakecomputing.com"], "type": "string", "title": "Account Name", - "order": 0 + "order": 1 }, "role": { "description": "The role you created for Airbyte to access Snowflake.", "examples": ["AIRBYTE_ROLE"], "type": "string", "title": "Role", - "order": 1 + "order": 2 }, "warehouse": { "description": "The warehouse you created for Airbyte to access data.", "examples": ["AIRBYTE_WAREHOUSE"], "type": "string", "title": "Warehouse", - "order": 2 + "order": 3 }, "database": { "description": "The database you created for Airbyte to access data.", "examples": ["AIRBYTE_DATABASE"], "type": "string", "title": "Database", - "order": 3 + "order": 4 }, "schema": { "description": "The source Snowflake schema tables.", "examples": ["AIRBYTE_SCHEMA"], "type": "string", "title": "Schema", - "order": 4 - }, - "username": { - "description": "The username you created to allow Airbyte to access the database.", - "examples": ["AIRBYTE_USER"], - "type": "string", - "title": "Username", "order": 5 }, - "password": { - "description": "The password associated with the username.", - "type": "string", - "airbyte_secret": true, - "title": "Password", - "order": 6 - }, "jdbc_url_params": { "description": "Additional properties to pass to the JDBC URL string when connecting to the 
database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", "title": "JDBC URL Params", "type": "string", - "order": 7 + "order": 6 + } + } + }, + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "OAuth", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "host": { + "type": "string", + "path_in_connector_config": ["host"] + } + } + }, + "complete_oauth_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "access_token": { + "type": "string", + "path_in_connector_config": ["credentials", "access_token"] + }, + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } + } } } } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java index 011c4aad414b..9c81721ebc70 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java @@ -4,19 +4,25 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableSet; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; import io.airbyte.integrations.source.snowflake.SnowflakeSource; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import java.math.BigDecimal; import java.nio.file.Path; import java.sql.JDBCType; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; class SnowflakeJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { @@ -26,10 +32,6 @@ class SnowflakeJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { static void init() { snConfig = Jsons .deserialize(IOs.readFile(Path.of("secrets/config.json"))); - } - - @BeforeEach - public void setup() throws Exception { // due to case sensitiveness in 
SnowflakeDB SCHEMA_NAME = "JDBC_INTEGRATION_TEST1"; SCHEMA_NAME2 = "JDBC_INTEGRATION_TEST2"; @@ -49,7 +51,10 @@ public void setup() throws Exception { ID_VALUE_3 = new BigDecimal(3); ID_VALUE_4 = new BigDecimal(4); ID_VALUE_5 = new BigDecimal(5); + } + @BeforeEach + public void setup() throws Exception { super.setup(); } @@ -79,4 +84,11 @@ public AbstractJdbcSource getJdbcSource() { return new SnowflakeSource(); } + @Test + void testCheckFailure() throws Exception { + ((ObjectNode) config.get("credentials")).put("password", "fake"); + final AirbyteConnectionStatus actual = source.check(config); + assertEquals(Status.FAILED, actual.getStatus()); + } + } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java index 9f7047ea306c..9b676083f03e 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAcceptanceTest.java @@ -4,7 +4,10 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Lists; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; @@ -26,6 +29,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.Test; public class SnowflakeSourceAcceptanceTest extends SourceAcceptanceTest { @@ -35,8 +39,8 @@ public class SnowflakeSourceAcceptanceTest extends SourceAcceptanceTest { private static final String STREAM_NAME2 = "ID_AND_NAME2"; // config which refers to the schema that the test is being run in. - private JsonNode config; - private JdbcDatabase database; + protected JsonNode config; + protected JdbcDatabase database; @Override protected String getImageName() { @@ -90,17 +94,7 @@ protected JsonNode getState() { // for each test we create a new schema in the database. run the test in there and then remove it. 
@Override protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - config = Jsons.clone(getStaticConfig()); - database = Databases.createJdbcDatabase( - config.get("username").asText(), - config.get("password").asText(), - String.format("jdbc:snowflake://%s/", - config.get("host").asText()), - SnowflakeSource.DRIVER_CLASS, - Map.of("role", config.get("role").asText(), - "warehouse", config.get("warehouse").asText(), - "database", config.get("database").asText())); - + database = setupDataBase(); final String createSchemaQuery = String.format("CREATE SCHEMA IF NOT EXISTS %s", SCHEMA_NAME); final String createTableQuery1 = String .format("CREATE OR REPLACE TABLE %s.%s (ID INTEGER, NAME VARCHAR(200))", SCHEMA_NAME, @@ -130,4 +124,30 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { database.close(); } + protected JdbcDatabase setupDataBase() { + config = Jsons.clone(getStaticConfig()); + return Databases.createJdbcDatabase( + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), + String.format("jdbc:snowflake://%s/", + config.get("host").asText()), + SnowflakeSource.DRIVER_CLASS, + Map.of("role", config.get("role").asText(), + "warehouse", config.get("warehouse").asText(), + "database", config.get("database").asText())); + } + + @Test + public void testBackwardCompatibilityAfterAddingOAuth() throws Exception { + final JsonNode deprecatedStyleConfig = Jsons.clone(config); + final JsonNode password = deprecatedStyleConfig.get("credentials").get("password"); + final JsonNode username = deprecatedStyleConfig.get("credentials").get("username"); + + ((ObjectNode) deprecatedStyleConfig).remove("credentials"); + ((ObjectNode) deprecatedStyleConfig).set("password", password); + ((ObjectNode) deprecatedStyleConfig).set("username", username); + + assertEquals("SUCCEEDED", runCheckAndGetStatusAsString(deprecatedStyleConfig).toUpperCase()); + } + } diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java new file mode 100644 index 000000000000..bdcc57e9e08c --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceAuthAcceptanceTest.java @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */
+
+package io.airbyte.integrations.io.airbyte.integration_tests.sources;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.zaxxer.hikari.HikariDataSource;
+import io.airbyte.commons.io.IOs;
+import io.airbyte.commons.json.Jsons;
+import io.airbyte.db.jdbc.JdbcDatabase;
+import io.airbyte.db.jdbc.JdbcUtils;
+import io.airbyte.db.jdbc.StreamingJdbcDatabase;
+import io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils;
+import io.airbyte.integrations.source.snowflake.SnowflakeJdbcStreamingQueryConfiguration;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Properties;
+import javax.sql.DataSource;
+
+public class SnowflakeSourceAuthAcceptanceTest extends SnowflakeSourceAcceptanceTest {
+
+  @Override
+  protected JdbcDatabase setupDataBase() {
+    config = getStaticConfig();
+    final DataSource dataSource = createDataSource(getStaticConfig());
+    return new StreamingJdbcDatabase(dataSource,
+        JdbcUtils.getDefaultSourceOperations(),
+        new SnowflakeJdbcStreamingQueryConfiguration());
+  }
+
+  private HikariDataSource createDataSource(final JsonNode config) {
+    HikariDataSource dataSource = new HikariDataSource();
+    Properties properties = new Properties();
+
+    final StringBuilder jdbcUrl = new StringBuilder(
+        String.format("jdbc:snowflake://%s/?", config.get("host").asText()));
+    jdbcUrl.append(String.format(
+        "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s",
+        config.get("role").asText(),
+        config.get("warehouse").asText(),
+        config.get("database").asText(),
+        config.get("schema").asText(),
+        // Needed for JDK17 - see
+        // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow
+        "JSON",
+        true));
+    if (config.has("jdbc_url_params")) {
+      jdbcUrl.append("&").append(config.get("jdbc_url_params").asText());
+    }
+
+    var credentials = config.get("credentials");
+    try {
+      properties.setProperty("client_id", credentials.get("client_id").asText());
+      properties.setProperty("client_secret", credentials.get("client_secret").asText());
+      properties.setProperty("refresh_token", credentials.get("refresh_token").asText());
+      properties.setProperty("host", config.get("host").asText());
+      var accessToken = SnowflakeDataSourceUtils.getAccessTokenUsingRefreshToken(
+          config.get("host").asText(), credentials.get("client_id").asText(),
+          credentials.get("client_secret").asText(), credentials.get("refresh_token").asText());
+      properties.put("authenticator", "oauth");
+      properties.put("token", accessToken);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+
+    properties.put("warehouse", config.get("warehouse").asText());
+    properties.put("account", config.get("host").asText());
+    properties.put("role", config.get("role").asText());
+    // allows queries to contain any number of statements
+    properties.put("MULTI_STATEMENT_COUNT", "0");
+    // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application
+    // identify airbyte traffic to snowflake to enable partnership & optimization opportunities
+    properties.put("dataSource.application", "airbyte");
+    // Needed for JDK17 - see
+    // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow
+    properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON");
+
+    dataSource.setDriverClassName("net.snowflake.client.jdbc.SnowflakeDriver");
+    dataSource.setJdbcUrl(jdbcUrl.toString());
+    dataSource.setDataSourceProperties(properties);
return dataSource; + } + + JsonNode getStaticConfig() { + return Jsons + .deserialize(IOs.readFile(Path.of("secrets/config_auth.json"))); + } + + @Override + public void testBackwardCompatibilityAfterAddingOAuth() throws Exception { + // this test case is not valid for OAuth method + } +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java index bf739e15246b..df49c9884d3f 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeSourceDatatypeTest.java @@ -51,8 +51,8 @@ protected Database setupDatabase() throws Exception { private Database getDatabase() { return Databases.createDatabase( - config.get("username").asText(), - config.get("password").asText(), + config.get("credentials").get("username").asText(), + config.get("credentials").get("password").asText(), String.format("jdbc:snowflake://%s/", config.get("host").asText()), SnowflakeSource.DRIVER_CLASS, diff --git a/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java new file mode 100644 index 000000000000..bf7080d82b0a --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.snowflake; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import org.junit.jupiter.api.Test; + +class SnowflakeDataSourceUtilsTest { + + private final String config = """ + { + "host": "host", + "role": "role", + "schema": "SOURCE_SCHEMA", + "database": "DATABASE", + "warehouse": "WAREHOUSE", + "credentials": { + "auth_type": "OAuth", + "client_id": "someid", + "access_token": "**********", + "client_secret": "clientSecret", + "refresh_token": "token" + } + } + """; + private final String expectedJdbcUrl = + "jdbc:snowflake://host/?role=role&warehouse=WAREHOUSE&database=DATABASE&schema=SOURCE_SCHEMA&JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true"; + + @Test + void testBuildJDBCUrl() { + JsonNode expectedConfig = Jsons.deserialize(config); + + String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + + assertEquals(expectedJdbcUrl, jdbcURL); + } + + @Test + void testBuildJDBCUrlWithParams() { + JsonNode expectedConfig = Jsons.deserialize(config); + String params = "someParameter1¶m2=someParameter2"; + ((ObjectNode) expectedConfig).put("jdbc_url_params", params); + + String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + + assertEquals(expectedJdbcUrl + "&" + params, jdbcURL); + } + +} diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 9fb4056af3f5..06723d5b0ef3 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -63,6 +63,7 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-shopify", new ShopifyOAuthFlow(configRepository, httpClient)) .put("airbyte/source-tiktok-marketing", new TikTokMarketingOAuthFlow(configRepository, httpClient)) .put("airbyte/destination-snowflake", new DestinationSnowflakeOAuthFlow(configRepository, httpClient)) + .put("airbyte/source-snowflake", new SourceSnowflakeOAuthFlow(configRepository, httpClient)) .build(); } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java new file mode 100644 index 000000000000..d9c976cf5ea3 --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/SourceSnowflakeOAuthFlow.java @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.oauth.BaseOAuth2Flow; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.function.Supplier; +import org.apache.http.client.utils.URIBuilder; + +public class SourceSnowflakeOAuthFlow extends BaseOAuth2Flow { + + private static final String AUTHORIZE_URL = "https://%s/oauth/authorize"; + private static final String ACCESS_TOKEN_URL = "https://%s/oauth/token-request"; + + public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient) { + super(configRepository, httpClient); + } + + @VisibleForTesting + public SourceSnowflakeOAuthFlow(ConfigRepository configRepository, HttpClient httpClient, final Supplier stateSupplier) { + super(configRepository, httpClient, stateSupplier); + } + + @Override + protected String formatConsentUrl(UUID definitionId, + String clientId, + String redirectUrl, + JsonNode inputOAuthConfiguration) + throws IOException { + try { + return new URIBuilder(String.format(AUTHORIZE_URL, extractUrl(inputOAuthConfiguration))) + .addParameter("client_id", clientId) + .addParameter("redirect_uri", redirectUrl) + .addParameter("response_type", "code") + .addParameter("state", getState()) + .build().toString(); + } catch (final URISyntaxException e) { + throw new IOException("Failed to format Consent URL for OAuth flow", e); + } + } + + @Override + protected String getAccessTokenUrl(JsonNode inputOAuthConfiguration) { + return String.format(ACCESS_TOKEN_URL, extractUrl(inputOAuthConfiguration)); + } + + @Override + protected String extractCodeParameter(Map queryParams) throws IOException { + return super.extractCodeParameter(queryParams); + } + + @Override + protected Map getAccessTokenQueryParameters(String clientId, + String clientSecret, + String authCode, + String redirectUrl) { + return ImmutableMap.builder() + // required + .put("grant_type", "authorization_code") + .put("code", authCode) + .put("redirect_uri", redirectUrl) + .build(); + } + + @Override + protected Map completeOAuthFlow(final String clientId, + final String clientSecret, + final String authCode, + final String redirectUrl, + final JsonNode inputOAuthConfiguration, + final JsonNode oAuthParamConfig) + throws IOException { + final var accessTokenUrl = getAccessTokenUrl(inputOAuthConfiguration); + final byte[] authorization = Base64.getEncoder() + .encode((clientId + ":" + clientSecret).getBytes(StandardCharsets.UTF_8)); + final HttpRequest request = HttpRequest.newBuilder() + .POST(HttpRequest.BodyPublishers + .ofString(tokenReqContentType.getConverter().apply( + getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl)))) + .uri(URI.create(accessTokenUrl)) + .header("Content-Type", tokenReqContentType.getContentType()) + .header("Accept", "application/json") + .header("Authorization", "Basic " + new String(authorization, StandardCharsets.UTF_8)) + .build(); + try { + final HttpResponse response = httpClient.send(request, + HttpResponse.BodyHandlers.ofString()); + 
+ return extractOAuthOutput(Jsons.deserialize(response.body()), accessTokenUrl); + } catch (final InterruptedException e) { + throw new IOException("Failed to complete OAuth flow", e); + } + } + + @Override + protected Map extractOAuthOutput(JsonNode data, String accessTokenUrl) + throws IOException { + final Map result = new HashMap<>(); + // access_token is valid for only 10 minutes + if (data.has("access_token")) { + result.put("access_token", data.get("access_token").asText()); + } else { + throw new IOException(String.format("Missing 'access_token' in query params from %s", + accessTokenUrl)); + } + + if (data.has("refresh_token")) { + result.put("refresh_token", data.get("refresh_token").asText()); + } else { + throw new IOException(String.format("Missing 'refresh_token' in query params from %s", + accessTokenUrl)); + } + if (data.has("username")) { + result.put("username", data.get("username").asText()); + } else { + throw new IOException(String.format("Missing 'username' in query params from %s", + accessTokenUrl)); + } + return result; + } + + private String extractUrl(JsonNode inputOAuthConfiguration) { + var url = inputOAuthConfiguration.get("host"); + return url == null ? "snowflakecomputing.com" : url.asText(); + } + +} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java new file mode 100644 index 000000000000..e982170807f0 --- /dev/null +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/SnowflakeOAuthFlowTest.java @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.oauth.BaseOAuthFlow; +import io.airbyte.oauth.MoreOAuthParameters; +import java.util.Map; +import org.junit.jupiter.api.Test; + +public class SnowflakeOAuthFlowTest extends BaseOAuthFlowTest { + + @Override + protected BaseOAuthFlow getOAuthFlow() { + return new SourceSnowflakeOAuthFlow(getConfigRepository(), getHttpClient(), this::getConstantState); + } + + @Override + protected String getExpectedConsentUrl() { + return "https://account.aws.snowflakecomputing.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&response_type=code&state=state"; + } + + @Override + protected Map getExpectedOutput() { + return Map.of( + "access_token", "access_token_response", + "refresh_token", "refresh_token_response", + "username", "username"); + } + + @Override + protected JsonNode getCompleteOAuthOutputSpecification() { + return getJsonSchema(Map.of("access_token", Map.of("type", "string"), "refresh_token", Map.of("type", "string"))); + } + + @Override + protected Map getExpectedFilteredOutput() { + return Map.of( + "access_token", "access_token_response", + "refresh_token", "refresh_token_response", + "client_id", MoreOAuthParameters.SECRET_MASK); + } + + protected JsonNode getOAuthParamConfig() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("client_id", "test_client_id") + .put("client_secret", "test_client_secret") + .build()); + } + + @Override + protected JsonNode getInputOAuthConfiguration() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("host", "account.aws.snowflakecomputing.com") + .build()); + } + + protected JsonNode getUserInputFromConnectorConfigSpecification() { + return getJsonSchema(Map.of("host", Map.of("type", 
"string"))); + } + + @Test + @Override + public void testGetSourceConsentUrlEmptyOAuthSpec() {} + + @Test + @Override + public void testGetDestinationConsentUrlEmptyOAuthSpec() {} + + @Test + @Override + public void testDeprecatedCompleteDestinationOAuth() {} + + @Test + @Override + public void testDeprecatedCompleteSourceOAuth() {} + +} diff --git a/docs/integrations/sources/snowflake.md b/docs/integrations/sources/snowflake.md index 0c9a15e483ba..f500081c8b2a 100644 --- a/docs/integrations/sources/snowflake.md +++ b/docs/integrations/sources/snowflake.md @@ -72,10 +72,38 @@ You can limit this grant down to specific schemas instead of the whole database. Your database user should now be ready for use with Airbyte. +###Authentication +#### There are 2 way ways of oauth supported: login\pass and oauth2. + +### Login and Password +| Field | Description | +|---|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | +| [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | +| [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | +| [Database](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The database you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_DATABASE` | +| [Schema](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The default schema used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. | +| Username | The username you created in Step 2 to allow Airbyte to access the database. Example: `AIRBYTE_USER` | +| Password | The password associated with the username. | +| [JDBC URL Params](https://docs.snowflake.com/en/user-guide/jdbc-parameters.html) (Optional) | Additional properties to pass to the JDBC URL string when connecting to the database formatted as `key=value` pairs separated by the symbol `&`. Example: `key1=value1&key2=value2&key3=value3` | + + +### OAuth 2.0 +Field | Description | +|---|---| +| [Host](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html) | The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com). Example: `accountname.us-east-2.aws.snowflakecomputing.com` | +| [Role](https://docs.snowflake.com/en/user-guide/security-access-control-overview.html#roles) | The role you created in Step 1 for Airbyte to access Snowflake. Example: `AIRBYTE_ROLE` | +| [Warehouse](https://docs.snowflake.com/en/user-guide/warehouses-overview.html#overview-of-warehouses) | The warehouse you created in Step 1 for Airbyte to sync data into. Example: `AIRBYTE_WAREHOUSE` | +| [Database](https://docs.snowflake.com/en/sql-reference/ddl-database.html#database-schema-share-ddl) | The database you created in Step 1 for Airbyte to sync data into. 
## Changelog

| Version | Date | Pull Request | Subject |
| :--- | :--- | :--- | :--- |
+| 0.1.11 | 2022-04-27 | [10953](https://github.com/airbytehq/airbyte/pull/10953) | Implement OAuth flow |
| 0.1.9 | 2022-02-21 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Fixed cursor for old connectors that use non-microsecond format. Now connectors work with both formats |
| 0.1.8 | 2022-02-18 | [10242](https://github.com/airbytehq/airbyte/pull/10242) | Updated timestamp transformation with microseconds |
| 0.1.7 | 2022-02-14 | [10256](https://github.com/airbytehq/airbyte/pull/10256) | Add `-XX:+ExitOnOutOfMemoryError` JVM option |

From c0c27f407cf04d31189431657c82e46328a1c53f Mon Sep 17 00:00:00 2001
From: Eric
Date: Wed, 27 Apr 2022 19:00:40 +0200
Subject: [PATCH 27/39] Fix typo on `DATABASE_URL` comment (#12404)

---
 .env | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.env b/.env
index 056bc9558a38..428dc71fdc9b 100644
--- a/.env
+++ b/.env
@@ -40,7 +40,7 @@ DATABASE_PASSWORD=docker
DATABASE_HOST=db
DATABASE_PORT=5432
DATABASE_DB=airbyte
-# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT/${DATABASE_DB} (do not include the username or password here)
+# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_DB} (do not include the username or password here)
DATABASE_URL=jdbc:postgresql://db:5432/airbyte
JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.29.15.001

From d612b8abe5159f62f1416fcba5caa6011c424787 Mon Sep 17 00:00:00 2001
From: midavadim
Date: Wed, 27 Apr 2022 20:33:15 +0300
Subject: [PATCH 28/39] :tada: Source tiktok marketing: fixed specs, updated docs (#12380)

* specs: changed order for input params, marked required params
* updated docs
* updated specs test
* updated connector version
* auto-bump connector version
* updated seed files

Co-authored-by: Octavia Squidington III
---
 .../resources/seed/source_definitions.yaml | 2 +-
 .../src/main/resources/seed/source_specs.yaml | 54 +-
 .../source-tiktok-marketing/Dockerfile | 2 +-
 .../integration_tests/spec.json | 36 +-
 .../source_tiktok_marketing/spec.py | 16 +-
 docs/integrations/sources/tiktok-marketing.md | 541 ++++++++++++++++--
 6 files changed, 554 insertions(+), 97 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index 3ec994c85491..746412679cfb 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -799,7 +799,7 @@
 - name: TikTok Marketing
   sourceDefinitionId: 4bfac00d-ce15-44ff-95b9-9e3c3e8fbd35
   dockerRepository: airbyte/source-tiktok-marketing
-  dockerImageTag: 0.1.6
+  dockerImageTag: 0.1.7
   documentationUrl: https://docs.airbyte.io/integrations/sources/tiktok-marketing
   icon: tiktok.svg
   sourceType:
api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 333e61f9bada..631af548711a 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -8636,7 +8636,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tiktok-marketing:0.1.6" +- dockerImage: "airbyte/source-tiktok-marketing:0.1.7" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" changelogUrl: "https://docs.airbyte.io/integrations/sources/tiktok-marketing" @@ -8644,31 +8644,10 @@ title: "TikTok Marketing Source Spec" type: "object" properties: - start_date: - title: "Start Date" - description: "The Start Date in format: YYYY-MM-DD. Any data before this\ - \ date will not be replicated. If this parameter is not set, all data\ - \ will be replicated." - default: "2016-09-01" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - order: 0 - type: "string" - report_granularity: - title: "Report Granularity" - description: "Which time granularity should be grouped by; for LIFETIME\ - \ there will be no grouping. This option is used for reports' streams\ - \ only." - default: "DAY" - enum: - - "LIFETIME" - - "DAY" - - "HOUR" - order: 1 - type: "string" credentials: - title: "Authorization Method" + title: "Authentication *" default: {} - order: 3 + order: 0 type: "object" oneOf: - title: "OAuth2.0" @@ -8678,8 +8657,6 @@ title: "Auth Type" const: "oauth2.0" order: 0 - enum: - - "oauth2.0" type: "string" app_id: title: "App ID" @@ -8707,8 +8684,6 @@ title: "Auth Type" const: "prod_access_token" order: 0 - enum: - - "prod_access_token" type: "string" app_id: title: "App ID" @@ -8735,8 +8710,6 @@ title: "Auth Type" const: "sandbox_access_token" order: 0 - enum: - - "sandbox_access_token" type: "string" advertiser_id: title: "Advertiser ID" @@ -8751,6 +8724,27 @@ required: - "advertiser_id" - "access_token" + start_date: + title: "Start Date *" + description: "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + report_granularity: + title: "Report Granularity *" + description: "Which time granularity should be grouped by; for LIFETIME\ + \ there will be no grouping. This option is used for reports' streams\ + \ only." 
+ default: "DAY" + enum: + - "LIFETIME" + - "DAY" + - "HOUR" + order: 2 + type: "string" supportsIncremental: true supportsNormalization: false supportsDBT: false diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile index 690dfa479c14..5524acd9717b 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile +++ b/airbyte-integrations/connectors/source-tiktok-marketing/Dockerfile @@ -32,5 +32,5 @@ COPY source_tiktok_marketing ./source_tiktok_marketing ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-tiktok-marketing diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json index bd914e8e5d62..da6cad26a536 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-tiktok-marketing/integration_tests/spec.json @@ -5,26 +5,10 @@ "title": "TikTok Marketing Source Spec", "type": "object", "properties": { - "start_date": { - "title": "Start Date", - "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", - "default": "2016-09-01", - "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", - "order": 0, - "type": "string" - }, - "report_granularity": { - "title": "Report Granularity", - "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. This option is used for reports' streams only.", - "default": "DAY", - "enum": ["LIFETIME", "DAY", "HOUR"], - "order": 1, - "type": "string" - }, "credentials": { - "title": "Authorization Method", + "title": "Authentication *", "default": {}, - "order": 3, + "order": 0, "type": "object", "oneOf": [ { @@ -113,6 +97,22 @@ "required": ["advertiser_id", "access_token"] } ] + }, + "start_date": { + "title": "Start Date *", + "description": "The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.", + "default": "2016-09-01", + "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$", + "order": 1, + "type": "string" + }, + "report_granularity": { + "title": "Report Granularity *", + "description": "Which time granularity should be grouped by; for LIFETIME there will be no grouping. 
This option is used for reports' streams only.", + "default": "DAY", + "enum": ["LIFETIME", "DAY", "HOUR"], + "order": 2, + "type": "string" } } }, diff --git a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py index 79019570f0ce..f33e829befff 100644 --- a/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py +++ b/airbyte-integrations/connectors/source-tiktok-marketing/source_tiktok_marketing/spec.py @@ -57,26 +57,26 @@ class SourceTiktokMarketingSpec(BaseModel): class Config: title = "TikTok Marketing Source Spec" + credentials: Union[OauthCredSpec, ProductionEnvSpec, SandboxEnvSpec] = Field( + title="Authentication *", order=0, default={}, type="object" + ) + start_date: str = Field( - title="Start Date", + title="Start Date *", default=DEFAULT_START_DATE, pattern="^[0-9]{4}-[0-9]{2}-[0-9]{2}$", description="The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. " "If this parameter is not set, all data will be replicated.", - order=0, + order=1, ) report_granularity: str = Field( - title="Report Granularity", + title="Report Granularity *", description="Which time granularity should be grouped by; for LIFETIME there will be no grouping. " "This option is used for reports' streams only.", default=ReportGranularity.default().value, enum=[g.value for g in ReportGranularity], - order=1, - ) - - credentials: Union[OauthCredSpec, ProductionEnvSpec, SandboxEnvSpec] = Field( - title="Authorization Method", order=3, default={}, type="object" + order=2, ) @classmethod diff --git a/docs/integrations/sources/tiktok-marketing.md b/docs/integrations/sources/tiktok-marketing.md index 0642ece8c5a4..9ae8455bfcad 100644 --- a/docs/integrations/sources/tiktok-marketing.md +++ b/docs/integrations/sources/tiktok-marketing.md @@ -1,64 +1,527 @@ # TikTok Marketing -## Overview +This page guides you through the process of setting up the TikTok Marketing source connector. -The [TikTok For Business Marketing API](https://ads.tiktok.com/marketing_api/homepage?rid=uvtbok1h19) allows you to directly interact with the TikTok Ads Manager platform for automated ad management and analysis. +## Prerequisites -The TikTok Marketing source supports both Full Refresh and Incremental syncs. You can choose if this connector will copy only the new or updated data, or all rows in the tables and columns you set up for replication, every time a sync is run. +For Production environment: +* Access token +* Secret +* App ID -This Source Connector is based on a [Airbyte CDK](https://docs.airbyte.io/connector-development/cdk-python). 
+For Sandbox environment:
+* Access token
+* Advertiser ID
-### Streams information
+For both environments:
+* Start date
+* Report Granularity (LIFETIME, DAY, HOUR)
-| Stream                            | Environment  | Granularities     | Key         | Incremental    | Schema                                                                                          |
-|:----------------------------------|--------------|-------------------|-------------|:---------------|-----------------------------------------------------------------------------------------------|
-| Advertisers                       | Prod,Sandbox | LIFETIME,DAY,HOUR | id          | No             | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708503202263042)                 |
-| AdGroups                          | Prod,Sandbox | LIFETIME,DAY,HOUR | adgroup_id  | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708503489590273)                 |
-| Ads                               | Prod,Sandbox | LIFETIME,DAY,HOUR | ad_id       | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708572923161602)                 |
-| Campaigns                         | Prod,Sandbox | LIFETIME,DAY,HOUR | campaign_id | Yes (DAY,HOUR) | [Link](https://business-api.tiktok.com/marketing_api/docs?id=1708582970809346)                 |
-| AdsReports                        | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290)     |
-| AdvertisersReports                | Prod         | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290)     |
-| AdGroupsReports                   | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290)     |
-| CampaignsReports                  | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) | [BasicReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957200780290)     |
-| AdvertisersAudienceReports        | Prod         | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489)  |
-| AdGroupAudienceReports            | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489)  |
-| AdsAudienceReports                | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489)  |
-| CampaignsAudienceReportsByCountry | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) | [AudienceReportsLink](https://business-api.tiktok.com/marketing_api/docs?id=1707957217727489)  |
+## Step 1: Set up TikTok
-If there are more endpoints you'd like Airbyte to support, please [create an issue.](https://github.com/airbytehq/airbyte/issues/new/choose)
+1. Create a TikTok For Business account: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1702715936951297)
+2. Create a developer application: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1702716474845185)
+3. For the Sandbox environment, create a Sandbox Ad Account: [Link](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890920013825)
-### Features
+## Step 2: Set up the source connector in Airbyte
-| Feature | Supported? |
-| :--- | :--- |
-| Full Refresh Sync | Yes |
-| Incremental - Append Sync | Yes |
-| SSL connection | Yes |
-| Namespaces | No |
+**For Airbyte Cloud:**
-### Performance considerations
+1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
+2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**.
+3. On the source setup page, select **Tiktok Marketing** from the Source type dropdown and enter a name for this connector.
+4. Select the `OAuth2.0` Authorization method, then click `Authenticate your account`.
+5. Log in and authorize access to your TikTok account.
+6. Choose the required Start date and Report Granularity.
+7. Click `Set up source`.
-The connector is restricted by [requests limitation](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890997610497). This connector should not run into TikTok Marketing API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully.
+**For Airbyte OSS:**
-## Getting started
+1. Go to the local Airbyte page.
+2. In the left navigation bar, click **Sources**. In the top-right corner, click **+ new source**.
+3. On the Set up the source page, enter a name for the connector and select **Tiktok Marketing** from the Source type dropdown.
+4. Select the `Production Access Token` or `Sandbox Access Token` Authorization method, then copy and paste the info from Step 1.
+5. Choose the required Start date and Report Granularity.
+6. Click `Set up source`.
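+
+For reference, a complete source configuration for the Production environment might look like the sketch below (field names follow the connector spec in this change; all values are placeholders):
+
+```
+{
+  "credentials": {
+    "auth_type": "prod_access_token",
+    "app_id": "<your App ID>",
+    "secret": "<your Secret>",
+    "access_token": "<your Access Token>"
+  },
+  "start_date": "2022-01-01",
+  "report_granularity": "DAY"
+}
+```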
-### Requirements
+## Supported streams and sync modes
-* Access Token - This token will not expire.
-* Production Environment
-  * App ID
-  * Secret
-* SandBox Environment
-  * Advertiser ID - It is generated for sandbox in one copy
+| Stream                            | Environment  | Granularities     | Key         | Incremental    |
+|:----------------------------------|--------------|-------------------|-------------|:---------------|
+| Advertisers                       | Prod,Sandbox | LIFETIME,DAY,HOUR | id          | No             |
+| AdGroups                          | Prod,Sandbox | LIFETIME,DAY,HOUR | adgroup_id  | Yes (DAY,HOUR) |
+| Ads                               | Prod,Sandbox | LIFETIME,DAY,HOUR | ad_id       | Yes (DAY,HOUR) |
+| Campaigns                         | Prod,Sandbox | LIFETIME,DAY,HOUR | campaign_id | Yes (DAY,HOUR) |
+| AdsReports                        | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdvertisersReports                | Prod         | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdGroupsReports                   | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| CampaignsReports                  | Prod,Sandbox | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdvertisersAudienceReports        | Prod         | LIFETIME,DAY,HOUR | None        | Yes (DAY,HOUR) |
+| AdGroupAudienceReports            | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) |
+| AdsAudienceReports                | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) |
+| CampaignsAudienceReportsByCountry | Prod,Sandbox | DAY,HOUR          | None        | Yes (DAY,HOUR) |
-### Setup guide
+**[Advertisers](https://ads.tiktok.com/marketing_api/docs?id=1708503202263042) Stream**
+```
+{
+  "contacter": "Ai***te",
+  "phonenumber": "+13*****5753",
+  "license_no": "",
+  "promotion_center_city": null,
+  "balance": 10,
+  "license_url": null,
+  "timezone": "Etc/GMT+8",
+  "reason": "",
+  "telephone": "+14*****6785",
+  "id": 7002238017842757633,
+  "language": "en",
+  "country": "US",
+  "role": "ROLE_ADVERTISER",
+  "license_province": null,
+  "display_timezone": "America/Los_Angeles",
+  "email": "i***************@**********",
+  "license_city": null,
+  "industry": "291905",
+  "create_time": 1630335591,
+  "promotion_center_province": null,
+  "address": "350 29th avenue, San Francisco",
+  "currency": "USD",
+  "promotion_area": "0",
+  "status": "STATUS_ENABLE",
+  "description": "https://",
+  "brand": null,
+  "name": "Airbyte0830",
+  "company": "Airbyte"
+}
+```
-Please read [How to get your AppID, Secret and Access
Token](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890909484033) or [How to create a SandBox Environment](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1701890920013825) +**[AdGroups](https://ads.tiktok.com/marketing_api/docs?id=1708503489590273) Stream** +``` +{ + "placement_type": "PLACEMENT_TYPE_AUTOMATIC", + "budget": 20, + "budget_mode": "BUDGET_MODE_DAY", + "display_mode": null, + "schedule_infos": null, + "billing_event": "CPC", + "conversion_window": null, + "adgroup_name": "Ad Group20211020010107", + "interest_keywords": [], + "is_comment_disable": 0, + "rf_buy_type": null, + "frequency": null, + "bid_type": "BID_TYPE_NO_BID", + "placement": null, + "bid": 0, + "include_custom_actions": [], + "operation_system": [], + "pixel_id": null, + "dayparting": "111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111", + "app_type": null, + "conversion_id": 0, + "rf_predict_cpr": null, + "deep_bid_type": null, + "scheduled_budget": 0.0, + "adgroup_id": 1714125049901106, + "frequency_schedule": null, + "exclude_custom_actions": [], + "advertiser_id": 7002238017842757633, + "deep_cpabid": 0, + "is_new_structure": true, + "buy_impression": null, + "external_type": "WEBSITE", + "excluded_audience": [], + "deep_external_action": null, + "interest_category_v2": [], + "rf_predict_frequency": null, + "audience": [], + "pacing": "PACING_MODE_SMOOTH", + "brand_safety_partner": null, + "daily_retention_ratio": null, + "optimize_goal": "CLICK", + "enable_search_result": false, + "conversion_bid": 0, + "schedule_end_time": "2021-10-31 09:01:07", + "opt_status": "ENABLE", + "status": "ADGROUP_STATUS_CAMPAIGN_DISABLE", + "app_id": null, + "external_action": null, + "schedule_type": "SCHEDULE_START_END", + "brand_safety": "NO_BRAND_SAFETY", + "campaign_id": 1714125042508817, + "campaign_name": "Website Traffic20211020010104", + "split_test_adgroup_ids": [], + "action_v2": [], + "is_hfss": false, + "keywords": null, + "create_time": "2021-10-20 08:04:05", + "feed_type": null, + "languages": ["en"], + "enable_inventory_filter": false, + "device_price": [], + "location": [6252001], + "schedule_start_time": "2021-10-20 09:01:07", + "skip_learning_phase": 0, + "gender": "GENDER_UNLIMITED", + "creative_material_mode": "CUSTOM", + "app_download_url": null, + "device_models": [], + "automated_targeting": "OFF", + "connection_type": [], + "ios14_quota_type": "UNOCCUPIED", + "modify_time": "2022-03-24 12:06:54", + "category": 0, + "statistic_type": null, + "video_download": "ALLOW_DOWNLOAD", + "age": ["AGE_25_34", "AGE_35_44", "AGE_45_54"], + "buy_reach": null, + "is_share_disable": false +} +``` + +**[Ads](https://ads.tiktok.com/marketing_api/docs?id=1708572923161602) Stream** +``` +{ + "vast_moat": false, + "is_new_structure": true, + "campaign_name": "CampaignVadimTraffic", + "landing_page_urls": null, + "card_id": null, + "adgroup_id": 1728545385226289, + "campaign_id": 1728545382536225, + "status": "AD_STATUS_CAMPAIGN_DISABLE", + "brand_safety_postbid_partner": "UNSET", + "advertiser_id": 7002238017842757633, + "is_aco": false, + "ad_text": "Open-source\ndata integration for modern data teams", + "identity_id": "7080121820963422209", + "display_name": "airbyte", + "open_url": "", + "external_action": 
null, + "playable_url": "", + "create_time": "2022-03-28 12:09:09", + "product_ids": [], + "adgroup_name": "AdGroupVadim", + "fallback_type": "UNSET", + "creative_type": null, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "video_id": "v10033g50000c90q1d3c77ub6e96fvo0", + "ad_format": "SINGLE_VIDEO", + "profile_image": "https://p21-ad-sg.ibyteimg.com/large/ad-site-i18n-sg/202203285d0de5c114d0690a462bb6a4", + "open_url_type": "NORMAL", + "click_tracking_url": null, + "page_id": null, + "ad_texts": null, + "landing_page_url": "https://airbyte.com", + "identity_type": "CUSTOMIZED_USER", + "avatar_icon_web_uri": "ad-site-i18n-sg/202203285d0de5c114d0690a462bb6a4", + "app_name": "", + "modify_time": "2022-03-28 21:34:26", + "opt_status": "ENABLE", + "call_to_action_id": "7080120957230238722", + "image_ids": ["v0201/7f371ff6f0764f8b8ef4f37d7b980d50"], + "ad_id": 1728545390695442, + "impression_tracking_url": null, + "is_creative_authorized": false +} +``` + +**[Campaigns](https://ads.tiktok.com/marketing_api/docs?id=1708582970809346) Stream** +``` +{ + "create_time": "2021-10-19 18:18:08", + "campaign_id": 1714073078669329, + "roas_bid": 0.0, + "advertiser_id": 7002238017842757633, + "modify_time": "2022-03-28 12:01:56", + "campaign_type": "REGULAR_CAMPAIGN", + "status": "CAMPAIGN_STATUS_DISABLE", + "objective_type": "TRAFFIC", + "split_test_variable": null, + "opt_status": "DISABLE", + "budget": 50, + "is_new_structure": true, + "deep_bid_type": null, + "campaign_name": "Website Traffic20211019110444", + "budget_mode": "BUDGET_MODE_DAY", + "objective": "LANDING_PAGE" +} +``` + +**AdsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "dimensions": { + "ad_id": 1728545390695442, + "stat_time_day": "2022-03-29 00:00:00" + }, + "metrics": { + "real_time_result_rate": 0.93, + "campaign_id": 1728545382536225, + "placement": "Automatic Placement", + "frequency": 1.17, + "cpc": 0.35, + "ctr": 0.93, + "cost_per_result": 0.3509, + "impressions": 6137, + "cost_per_conversion": 0, + "real_time_result": 57, + "adgroup_id": 1728545385226289, + "result_rate": 0.93, + "cost_per_1000_reached": 3.801, + "ad_text": "Open-source\ndata integration for modern data teams", + "spend": 20, + "conversion_rate": 0, + "real_time_cost_per_conversion": 0, + "promotion_type": "Website", + "tt_app_id": 0, + "real_time_cost_per_result": 0.3509, + "conversion": 0, + "secondary_goal_result": null, + "campaign_name": "CampaignVadimTraffic", + "cpm": 3.26, + "result": 57, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "secondary_goal_result_rate": null, + "clicks": 57, + "reach": 5262, + "cost_per_secondary_goal_result": null, + "real_time_conversion": 0, + "real_time_conversion_rate": 0, + "mobile_app_id": "0", + "tt_app_name": "0", + "adgroup_name": "AdGroupVadim", + "dpa_target_audience_type": null + } +} +``` + +**AdvertisersReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + "cpm": 5.43, + "impressions": 3682, + "frequency": 1.17, + "reach": 3156, + "cash_spend": 20, + "ctr": 1.14, + "spend": 20, + "cpc": 0.48, + "cost_per_1000_reached": 6.337, + "clicks": 42, + "voucher_spend": 0 + }, + "dimensions": { + "stat_time_day": "2022-03-30 00:00:00", + "advertiser_id": 7002238017842757633 + } +} + +``` + +**AdGroupsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + 
"real_time_conversion": 0, + "real_time_cost_per_conversion": 0, + "cost_per_1000_reached": 3.801, + "mobile_app_id": "0", + "reach": 5262, + "cpm": 3.26, + "conversion": 0, + "promotion_type": "Website", + "clicks": 57, + "real_time_result_rate": 0.93, + "real_time_conversion_rate": 0, + "cost_per_conversion": 0, + "dpa_target_audience_type": null, + "result": 57, + "cpc": 0.35, + "impressions": 6137, + "cost_per_result": 0.3509, + "tt_app_id": 0, + "cost_per_secondary_goal_result": null, + "frequency": 1.17, + "spend": 20, + "secondary_goal_result_rate": null, + "real_time_cost_per_result": 0.3509, + "real_time_result": 57, + "placement": "Automatic Placement", + "result_rate": 0.93, + "tt_app_name": "0", + "campaign_name": "CampaignVadimTraffic", + "secondary_goal_result": null, + "campaign_id": 1728545382536225, + "conversion_rate": 0, + "ctr": 0.93, + "adgroup_name": "AdGroupVadim" + }, + "dimensions": { + "adgroup_id": 1728545385226289, + "stat_time_day": "2022-03-29 00:00:00" + } +} +``` + +**CampaignsReports Stream - [BasicReports](https://ads.tiktok.com/marketing_api/docs?id=1707957200780290)** +``` +{ + "metrics": { + "cpc": 0.43, + "spend": 20, + "clicks": 46, + "cost_per_1000_reached": 4.002, + "impressions": 5870, + "ctr": 0.78, + "frequency": 1.17, + "cpm": 3.41, + "campaign_name": "CampaignVadimTraffic", + "reach": 4997 + }, + "dimensions": { + "campaign_id": 1728545382536225, + "stat_time_day": "2022-03-28 00:00:00" + } +} + +``` + +**AdsAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + { + "result": 17, + "clicks": 17, + "real_time_conversion_rate": 0, + "adgroup_id": 1728545385226289, + "cpm": 3.01, + "cost_per_result": 0.4165, + "real_time_cost_per_result": 0.4165, + "mobile_app_id": 0, + "spend": 7.08, + "cpc": 0.42, + "placement": "Automatic Placement", + "real_time_conversion": 0, + "dpa_target_audience_type": null, + "real_time_result_rate": 0.72, + "adgroup_name": "AdGroupVadim", + "tt_app_id": 0, + "ctr": 0.72, + "ad_text": "Open-source\ndata integration for modern data teams", + "result_rate": 0.72, + "ad_name": "AdVadim-Optimized Version 3_202203281449_2022-03-28 05:03:44", + "conversion_rate": 0, + "real_time_result": 17, + "tt_app_name": "0", + "cost_per_conversion": 0, + "real_time_cost_per_conversion": 0, + "conversion": 0, + "impressions": 2350, + "promotion_type": "Website", + "campaign_id": 1728545382536225, + "campaign_name": "CampaignVadimTraffic" + }, + "dimensions": { + "gender": "MALE", + "age": "AGE_25_34", + "ad_id": 1728545390695442, + "stat_time_day": "2022-03-28 00:00:00" + } +} +``` + +**AdvertisersAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "dimensions": { + "stat_time_day": "2022-03-28 00:00:00", + "gender": "FEMALE", + "advertiser_id": 7002238017842757633, + "age": "AGE_35_44" + }, + "metrics": { + "spend": 3.09, + "ctr": 0.93, + "cpc": 0.44, + "clicks": 7, + "cpm": 4.11, + "impressions": 752 + } +} +``` + +**AdGroupAudienceReports Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "dimensions": { + "gender": "MALE", + "age": "AGE_25_34", + "stat_time_day": "2022-03-29 00:00:00", + "adgroup_id": 1728545385226289 + }, + "metrics": { + "cost_per_conversion": 0, + "campaign_id": 1728545382536225, + "campaign_name": "CampaignVadimTraffic", + "clicks": 20, + "dpa_target_audience_type": null, + "mobile_app_id": 0, + "promotion_type": "Website", + 
"conversion_rate": 0, + "cpm": 3.9, + "cost_per_result": 0.3525, + "cpc": 0.35, + "real_time_cost_per_conversion": 0, + "ctr": 1.11, + "spend": 7.05, + "result": 20, + "real_time_result": 20, + "impressions": 1806, + "conversion": 0, + "real_time_result_rate": 1.11, + "real_time_conversion_rate": 0, + "real_time_conversion": 0, + "adgroup_name": "AdGroupVadim", + "tt_app_name": "0", + "placement": "Automatic Placement", + "real_time_cost_per_result": 0.3525, + "result_rate": 1.11, + "tt_app_id": 0 + } +} +``` + +**CampaignsAudienceReportsByCountry Stream - [AudienceReports](https://ads.tiktok.com/marketing_api/docs?id=1707957217727489)** +``` +{ + "metrics": { + "impressions": 5870, + "campaign_name": "CampaignVadimTraffic", + "cpm": 3.41, + "clicks": 46, + "spend": 20, + "ctr": 0.78, + "cpc": 0.43 + }, + "dimensions": { + "stat_time_day": "2022-03-28 00:00:00", + "campaign_id": 1728545382536225, + "country_code": "US" + } +} + +``` + +## Performance considerations + +The connector is restricted by [requests limitation](https://ads.tiktok.com/marketing_api/docs?rid=fgvgaumno25&id=1725359439428610). This connector should not run into TikTok Marketing API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully. ## Changelog | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------------------------------------------| +| 0.1.7 | 2022-04-27 | [12380](https://github.com/airbytehq/airbyte/pull/12380) | fixed spec descriptions and documentation | | 0.1.6 | 2022-04-19 | [11378](https://github.com/airbytehq/airbyte/pull/11378) | updated logic for stream initializations, fixed errors in schemas, updated SAT and unit tests | | 0.1.5 | 2022-02-17 | [10398](https://github.com/airbytehq/airbyte/pull/10398) | Add Audience reports | | 0.1.4 | 2021-12-30 | [7636](https://github.com/airbytehq/airbyte/pull/7636) | Add OAuth support | From 2eb93560e62eab9ed027f1f68e04f9f115ef451d Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Wed, 27 Apr 2022 20:41:43 +0300 Subject: [PATCH 29/39] Source Smartsheets: incremental read and tests (#12077) * #5520 fix scrambled columns bug * #5520 source smartsheets: add changelog item * #5520 move pytest to optional setup requirements * #12003 source smartsheets: implement incremental read + tests * #12003 source smartsheet: add changelog * #12003 source smartsheets: fix merge conflict on unit tests * #12003 source smartsheets: fix startdate in spec * #12003 source smartsheets: add default start dt to spec * #12003 source smartsheets: add default start dt to spec * auto-bump connector version Co-authored-by: Octavia Squidington III --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 11 +- .../connectors/source-smartsheets/Dockerfile | 2 +- .../acceptance-test-config.yml | 10 + .../integration_tests/abnormal_state.json | 5 + .../integration_tests/configured_catalog.json | 3 +- .../integration_tests/expected_records.txt | 100 +++++++ .../source_smartsheets/sheet.py | 92 +++++++ .../source_smartsheets/source.py | 125 +-------- .../source_smartsheets/spec.json | 8 + .../source_smartsheets/streams.py | 58 ++++ .../source-smartsheets/unit_tests/conftest.py | 34 +++ .../unit_tests/response.json | 251 ++++++++++++++++++ .../unit_tests/test_sheets.py | 119 +++++++++ 
.../unit_tests/test_source.py | 44 +-- .../unit_tests/test_streams.py | 21 ++ docs/integrations/sources/smartsheets.md | 9 +- 17 files changed, 741 insertions(+), 153 deletions(-) create mode 100644 airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json create mode 100644 airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt create mode 100644 airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py create mode 100644 airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 746412679cfb..d642115f0f67 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -737,7 +737,7 @@ - name: Smartsheets sourceDefinitionId: 374ebc65-6636-4ea0-925c-7d35999a8ffc dockerRepository: airbyte/source-smartsheets - dockerImageTag: 0.1.9 + dockerImageTag: 0.1.10 documentationUrl: https://docs.airbyte.io/integrations/sources/smartsheets icon: smartsheet.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 631af548711a..f3e3f72075ba 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -7856,7 +7856,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: "airbyte/source-smartsheets:0.1.9" +- dockerImage: "airbyte/source-smartsheets:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/smartsheets" connectionSpecification: @@ -7878,6 +7878,15 @@ title: "Sheet ID" description: "The spreadsheet ID. 
Find in the spreadsheet menu: File > Properties" type: "string" + start_datetime: + title: "Start Datetime" + type: "string" + examples: + - "2000-01-01T13:00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`" + format: "date-time" + default: "2020-01-01T00:00:00+00:00" supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] diff --git a/airbyte-integrations/connectors/source-smartsheets/Dockerfile b/airbyte-integrations/connectors/source-smartsheets/Dockerfile index 7907f022cc86..cb26f971e9da 100644 --- a/airbyte-integrations/connectors/source-smartsheets/Dockerfile +++ b/airbyte-integrations/connectors/source-smartsheets/Dockerfile @@ -14,5 +14,5 @@ COPY $CODE_PATH ./$CODE_PATH ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.9 +LABEL io.airbyte.version=0.1.10 LABEL io.airbyte.name=airbyte/source-smartsheets diff --git a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml index 54c4a0e8df86..063f068e5caa 100644 --- a/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-smartsheets/acceptance-test-config.yml @@ -12,6 +12,16 @@ tests: basic_read: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: yes + exact_order: yes + extra_records: no full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..461ef6d45b57 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "aws_s3_sample": { + "modifiedAt": "2222-03-07T11:30:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json index e263f7cae208..f919a67cd985 100644 --- a/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-smartsheets/integration_tests/configured_catalog.json @@ -13,7 +13,8 @@ "gender": { "type": "string" }, "ip_address": { "type": "string" }, "primary_email": { "type": "string" }, - "dob": { "type": "string", "format": "date" } + "dob": { "type": "string", "format": "date" }, + "modifiedAt": { "type": "string", "format": "date-time" } } }, "supported_sync_modes": ["full_refresh"] diff --git a/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt new file mode 100644 index 000000000000..7d1e55999bc1 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-smartsheets/integration_tests/expected_records.txt @@ -0,0 +1,100 @@ +{"stream": "aws_s3_sample", "data": {"id": "1.0", "first_name": "Joni", "last_name": "Watling", "email": "jwatling0@amazonaws.com", "gender": "Genderqueer", "ip_address": "195.50.216.194", "dob": "2020-11-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "2.0", "first_name": "Bernardo", "last_name": "Klaaassen", "email": "bklaaassen1@cbc.ca", "gender": "Polygender", "ip_address": "116.208.253.97", "dob": "2020-02-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "3.0", "first_name": "Drake", "last_name": "Bednell", "email": "dbednell2@theguardian.com", "gender": "Non-binary", "ip_address": "120.15.24.132", "dob": "2020-08-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "4.0", "first_name": "Alfreda", "last_name": "Brumbye", "email": "abrumbye3@howstuffworks.com", "gender": "Genderqueer", "ip_address": "64.22.217.122", "dob": "2020-12-29"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "5.0", "first_name": "Boycey", "last_name": "Brisson", "email": "bbrisson4@bizjournals.com", "gender": "Bigender", "ip_address": "59.220.127.45", "dob": "2020-06-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "6.0", "first_name": "Ursuline", "last_name": "Lintott", "email": "ulintott5@ow.ly", "gender": "Genderqueer", "ip_address": "47.253.138.238", "dob": "2020-07-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "7.0", "first_name": "Bettine", "last_name": "McKennan", "email": "bmckennan6@census.gov", "gender": "Bigender", "ip_address": "35.42.88.34", "dob": "2020-06-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "8.0", "first_name": "Eustace", "last_name": "Aaronsohn", "email": "eaaronsohn7@yale.edu", "gender": "Male", "ip_address": "84.153.189.160", "dob": "2020-12-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "9.0", "first_name": "Chrystel", "last_name": "Blum", "email": "cblum8@360.cn", "gender": "Bigender", "ip_address": "44.5.17.116", "dob": "2020-09-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "10.0", "first_name": "Kathryne", "last_name": "Cuncarr", "email": "kcuncarr9@hhs.gov", "gender": "Female", "ip_address": "50.63.175.212", "dob": "2020-06-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "11.0", "first_name": "Filmer", "last_name": "Ginni", "email": "fginnia@ucoz.com", "gender": "Genderfluid", "ip_address": "248.137.123.63", "dob": "2020-12-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "12.0", "first_name": "Anthiathia", "last_name": "Sketh", "email": "askethb@1688.com", "gender": "Female", "ip_address": "40.58.34.216", "dob": "2020-05-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "13.0", "first_name": "Pamella", "last_name": "Winterson", "email": "pwintersonc@biglobe.ne.jp", "gender": "Female", "ip_address": "173.8.175.104", "dob": "2020-06-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "14.0", "first_name": "Zuzana", "last_name": "Esmead", "email": "zesmeadd@bloglovin.com", "gender": "Polygender", "ip_address": "98.192.39.217", "dob": "2020-02-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "15.0", "first_name": "Donica", "last_name": "Jozaitis", "email": 
"djozaitise@amazon.de", "gender": "Female", "ip_address": "160.231.57.131", "dob": "2021-01-04"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "16.0", "first_name": "Pennie", "last_name": "Dunrige", "email": "pdunrigef@gravatar.com", "gender": "Non-binary", "ip_address": "208.255.160.56", "dob": "2020-03-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "17.0", "first_name": "Blanca", "last_name": "Allcroft", "email": "ballcroftg@furl.net", "gender": "Agender", "ip_address": "21.129.47.109", "dob": "2021-01-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "18.0", "first_name": "Webb", "last_name": "Simkins", "email": "wsimkinsh@qq.com", "gender": "Male", "ip_address": "2.125.148.89", "dob": "2020-06-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "19.0", "first_name": "Dorrie", "last_name": "Esser", "email": "desseri@rediff.com", "gender": "Female", "ip_address": "17.148.200.84", "dob": "2020-11-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "20.0", "first_name": "Kara", "last_name": "Gley", "email": "kgleyj@php.net", "gender": "Bigender", "ip_address": "117.130.134.124", "dob": "2020-12-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "21.0", "first_name": "Felicle", "last_name": "Roscrigg", "email": "froscriggk@java.com", "gender": "Female", "ip_address": "36.67.5.211", "dob": "2020-03-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "22.0", "first_name": "Carmine", "last_name": "Backshill", "email": "cbackshilll@addthis.com", "gender": "Polygender", "ip_address": "103.28.140.64", "dob": "2020-12-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "23.0", "first_name": "Helge", "last_name": "Kneeshaw", "email": "hkneeshawm@goo.gl", "gender": "Genderfluid", "ip_address": "154.154.89.226", "dob": "2020-07-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "24.0", "first_name": "Suzy", "last_name": "Ohm", "email": "sohmn@columbia.edu", "gender": "Bigender", "ip_address": "100.54.193.73", "dob": "2020-10-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "25.0", "first_name": "Bianka", "last_name": "Melmore", "email": "bmelmoreo@sohu.com", "gender": "Genderqueer", "ip_address": "38.63.204.171", "dob": "2020-11-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "26.0", "first_name": "Kim", "last_name": "Joust", "email": "kjoustp@sbwire.com", "gender": "Male", "ip_address": "87.176.59.210", "dob": "2020-12-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "27.0", "first_name": "Darrin", "last_name": "Warlawe", "email": "dwarlaweq@shinystat.com", "gender": "Male", "ip_address": "138.16.204.148", "dob": "2020-11-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "28.0", "first_name": "Edgard", "last_name": "Byfford", "email": "ebyffordr@spotify.com", "gender": "Polygender", "ip_address": "162.208.75.173", "dob": "2020-07-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "29.0", "first_name": "Dee", "last_name": "Bourgeois", "email": "dbourgeoiss@elegantthemes.com", "gender": "Polygender", "ip_address": "20.250.26.143", "dob": "2020-10-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "30.0", "first_name": "Fredrika", "last_name": "Ingry", "email": 
"fingryt@slashdot.org", "gender": "Non-binary", "ip_address": "255.214.102.98", "dob": "2020-04-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "31.0", "first_name": "Christie", "last_name": "Krier", "email": "ckrieru@aboutads.info", "gender": "Bigender", "ip_address": "29.122.167.180", "dob": "2020-09-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "32.0", "first_name": "Joshuah", "last_name": "Braffington", "email": "jbraffingtonv@foxnews.com", "gender": "Agender", "ip_address": "189.155.6.135", "dob": "2020-09-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "33.0", "first_name": "Bailie", "last_name": "Fossey", "email": "bfosseyw@flickr.com", "gender": "Agender", "ip_address": "129.166.4.82", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "34.0", "first_name": "Westley", "last_name": "Kupper", "email": "wkupperx@a8.net", "gender": "Agender", "ip_address": "12.125.54.217", "dob": "2020-04-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "35.0", "first_name": "Allie", "last_name": "Moogan", "email": "amoogany@jigsy.com", "gender": "Male", "ip_address": "158.225.146.105", "dob": "2020-12-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "36.0", "first_name": "Obadias", "last_name": "Stammers", "email": "ostammersz@shinystat.com", "gender": "Polygender", "ip_address": "210.226.250.161", "dob": "2021-01-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "37.0", "first_name": "Philippine", "last_name": "Barhems", "email": "pbarhems10@ted.com", "gender": "Male", "ip_address": "169.205.179.145", "dob": "2021-01-14"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "38.0", "first_name": "Theo", "last_name": "Messitt", "email": "tmessitt11@deviantart.com", "gender": "Male", "ip_address": "103.212.77.16", "dob": "2020-09-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "39.0", "first_name": "Roch", "last_name": "Cuphus", "email": "rcuphus12@pinterest.com", "gender": "Agender", "ip_address": "43.96.220.113", "dob": "2020-12-20"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "40.0", "first_name": "Sinclair", "last_name": "Chittey", "email": "schittey13@tamu.edu", "gender": "Genderfluid", "ip_address": "128.194.26.163", "dob": "2020-10-10"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "41.0", "first_name": "Eleonore", "last_name": "Guerrieri", "email": "eguerrieri14@typepad.com", "gender": "Genderfluid", "ip_address": "79.210.103.73", "dob": "2020-07-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "42.0", "first_name": "Elana", "last_name": "Secret", "email": "esecret15@mysql.com", "gender": "Polygender", "ip_address": "102.139.145.231", "dob": "2021-01-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "43.0", "first_name": "Dennie", "last_name": "Prati", "email": "dprati16@nytimes.com", "gender": "Genderqueer", "ip_address": "51.119.24.56", "dob": "2020-10-06"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "44.0", "first_name": "Roderick", "last_name": "Dand", "email": "rdand17@gmpg.org", "gender": "Genderqueer", "ip_address": "188.187.179.115", "dob": "2020-11-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "45.0", "first_name": "Lonnie", 
"last_name": "Grigolashvill", "email": "lgrigolashvill18@hhs.gov", "gender": "Non-binary", "ip_address": "96.104.221.230", "dob": "2020-05-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "46.0", "first_name": "Leslie", "last_name": "Iddins", "email": "liddins19@sbwire.com", "gender": "Genderqueer", "ip_address": "77.228.177.247", "dob": "2020-06-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "47.0", "first_name": "Conant", "last_name": "Gaishson", "email": "cgaishson1a@oakley.com", "gender": "Agender", "ip_address": "71.118.171.42", "dob": "2020-12-29"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "48.0", "first_name": "Aileen", "last_name": "Derrell", "email": "aderrell1b@amazonaws.com", "gender": "Genderfluid", "ip_address": "233.79.86.81", "dob": "2020-06-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "49.0", "first_name": "Heywood", "last_name": "Poulston", "email": "hpoulston1c@opera.com", "gender": "Genderqueer", "ip_address": "115.6.245.150", "dob": "2020-04-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "50.0", "first_name": "Neddie", "last_name": "Rickert", "email": "nrickert1d@omniture.com", "gender": "Polygender", "ip_address": "25.55.171.143", "dob": "2020-11-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "51.0", "first_name": "Ronny", "last_name": "Bondley", "email": "rbondley1e@loc.gov", "gender": "Genderqueer", "ip_address": "33.164.53.233", "dob": "2020-05-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "52.0", "first_name": "Filippa", "last_name": "McCuis", "email": "fmccuis1f@desdev.cn", "gender": "Bigender", "ip_address": "30.78.184.43", "dob": "2021-01-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "53.0", "first_name": "Kipper", "last_name": "Corton", "email": "kcorton1g@t.co", "gender": "Bigender", "ip_address": "177.22.101.164", "dob": "2021-01-23"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "54.0", "first_name": "Clementine", "last_name": "Callen", "email": "ccallen1h@storify.com", "gender": "Genderfluid", "ip_address": "122.40.201.54", "dob": "2020-06-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "55.0", "first_name": "Silvie", "last_name": "Royse", "email": "sroyse1i@mapquest.com", "gender": "Genderqueer", "ip_address": "38.145.193.0", "dob": "2020-06-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "56.0", "first_name": "Noble", "last_name": "Purslow", "email": "npurslow1j@redcross.org", "gender": "Non-binary", "ip_address": "119.89.26.248", "dob": "2020-11-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "57.0", "first_name": "Marjy", "last_name": "Gloves", "email": "mgloves1k@drupal.org", "gender": "Genderqueer", "ip_address": "250.108.63.170", "dob": "2020-07-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "58.0", "first_name": "Ellwood", "last_name": "Gullam", "email": "egullam1l@google.cn", "gender": "Genderfluid", "ip_address": "128.65.236.88", "dob": "2020-05-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "59.0", "first_name": "Adora", "last_name": "Povele", "email": "apovele1m@statcounter.com", "gender": "Genderfluid", "ip_address": "215.67.227.145", "dob": "2020-06-23"}, "emitted_at": 1649842201000} +{"stream": 
"aws_s3_sample", "data": {"id": "60.0", "first_name": "Miles", "last_name": "Zapatero", "email": "mzapatero1n@ezinearticles.com", "gender": "Non-binary", "ip_address": "212.252.221.177", "dob": "2020-05-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "61.0", "first_name": "Eddie", "last_name": "Menichi", "email": "emenichi1o@about.com", "gender": "Genderqueer", "ip_address": "138.77.252.222", "dob": "2020-02-11"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "62.0", "first_name": "Jakob", "last_name": "Showalter", "email": "jshowalter1p@cargocollective.com", "gender": "Genderfluid", "ip_address": "138.186.250.131", "dob": "2021-01-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "63.0", "first_name": "Zebadiah", "last_name": "Geratt", "email": "zgeratt1q@surveymonkey.com", "gender": "Genderfluid", "ip_address": "239.69.201.221", "dob": "2020-05-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "64.0", "first_name": "Carleton", "last_name": "Gayther", "email": "cgayther1r@si.edu", "gender": "Genderqueer", "ip_address": "138.237.56.77", "dob": "2020-03-09"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "65.0", "first_name": "Gwendolyn", "last_name": "Cotgrave", "email": "gcotgrave1s@dyndns.org", "gender": "Agender", "ip_address": "103.26.18.169", "dob": "2020-06-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "66.0", "first_name": "Nikki", "last_name": "Corry", "email": "ncorry1t@dedecms.com", "gender": "Female", "ip_address": "118.138.87.91", "dob": "2020-08-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "67.0", "first_name": "Kat", "last_name": "Figgins", "email": "kfiggins1u@jugem.jp", "gender": "Male", "ip_address": "202.202.94.181", "dob": "2020-06-19"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "68.0", "first_name": "Norean", "last_name": "Trendle", "email": "ntrendle1v@elpais.com", "gender": "Genderqueer", "ip_address": "134.89.22.248", "dob": "2020-08-24"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "69.0", "first_name": "Foster", "last_name": "Durker", "email": "fdurker1w@engadget.com", "gender": "Non-binary", "ip_address": "189.149.34.80", "dob": "2020-11-02"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "70.0", "first_name": "Rod", "last_name": "Jarnell", "email": "rjarnell1x@sphinn.com", "gender": "Genderfluid", "ip_address": "169.148.199.234", "dob": "2020-08-19"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "71.0", "first_name": "Lancelot", "last_name": "Plaxton", "email": "lplaxton1y@spiegel.de", "gender": "Agender", "ip_address": "81.194.71.38", "dob": "2020-09-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "72.0", "first_name": "Rudyard", "last_name": "Olliff", "email": "rolliff1z@bbb.org", "gender": "Agender", "ip_address": "113.39.154.178", "dob": "2021-01-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "73.0", "first_name": "Shelley", "last_name": "Lipprose", "email": "slipprose20@engadget.com", "gender": "Polygender", "ip_address": "117.254.24.20", "dob": "2021-01-03"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "74.0", "first_name": "Prudi", "last_name": "Boichat", "email": "pboichat21@cam.ac.uk", "gender": "Agender", "ip_address": "99.169.9.122", "dob": 
"2020-08-25"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "75.0", "first_name": "Denny", "last_name": "Bollum", "email": "dbollum22@skyrock.com", "gender": "Bigender", "ip_address": "77.112.28.180", "dob": "2020-07-31"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "76.0", "first_name": "Lucila", "last_name": "Charteris", "email": "lcharteris23@linkedin.com", "gender": "Genderfluid", "ip_address": "194.161.40.83", "dob": "2020-05-17"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "77.0", "first_name": "Marrissa", "last_name": "Wurz", "email": "mwurz24@pinterest.com", "gender": "Agender", "ip_address": "72.219.43.46", "dob": "2020-09-04"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "78.0", "first_name": "Teresina", "last_name": "Micklewicz", "email": "tmicklewicz25@goo.ne.jp", "gender": "Genderqueer", "ip_address": "214.116.247.204", "dob": "2020-09-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "79.0", "first_name": "Idette", "last_name": "Ilieve", "email": "iilieve26@mozilla.com", "gender": "Bigender", "ip_address": "25.25.28.17", "dob": "2020-09-21"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "80.0", "first_name": "Noemi", "last_name": "Lempenny", "email": "nlempenny27@jugem.jp", "gender": "Bigender", "ip_address": "194.139.183.130", "dob": "2020-09-01"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "81.0", "first_name": "Faye", "last_name": "Ashbee", "email": "fashbee28@google.com", "gender": "Bigender", "ip_address": "191.149.120.198", "dob": "2020-03-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "82.0", "first_name": "Olly", "last_name": "Siaspinski", "email": "osiaspinski29@amazonaws.com", "gender": "Polygender", "ip_address": "150.134.136.240", "dob": "2020-07-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "83.0", "first_name": "Marji", "last_name": "Dahlen", "email": "mdahlen2a@zdnet.com", "gender": "Bigender", "ip_address": "185.226.214.79", "dob": "2020-11-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "84.0", "first_name": "Aubine", "last_name": "Genner", "email": "agenner2b@chronoengine.com", "gender": "Genderfluid", "ip_address": "109.51.123.153", "dob": "2020-03-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "85.0", "first_name": "Dix", "last_name": "Civitillo", "email": "dcivitillo2c@bluehost.com", "gender": "Female", "ip_address": "112.89.157.163", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "86.0", "first_name": "Birk", "last_name": "Mussolini", "email": "bmussolini2d@wikimedia.org", "gender": "Agender", "ip_address": "235.49.78.159", "dob": "2020-03-05"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "87.0", "first_name": "Lenci", "last_name": "Wager", "email": "lwager2e@fda.gov", "gender": "Agender", "ip_address": "113.145.228.184", "dob": "2020-03-27"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "88.0", "first_name": "Avrit", "last_name": "Yosifov", "email": "ayosifov2f@umn.edu", "gender": "Male", "ip_address": "112.171.167.81", "dob": "2021-01-18"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "89.0", "first_name": "Honor", "last_name": "McMorran", "email": "hmcmorran2g@bbc.co.uk", "gender": "Genderqueer", 
"ip_address": "11.179.26.90", "dob": "2020-04-07"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "90.0", "first_name": "Lilah", "last_name": "Carnall", "email": "lcarnall2h@barnesandnoble.com", "gender": "Polygender", "ip_address": "51.194.48.153", "dob": "2020-06-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "91.0", "first_name": "Daffie", "last_name": "Cheke", "email": "dcheke2i@theatlantic.com", "gender": "Polygender", "ip_address": "158.53.238.38", "dob": "2020-11-12"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "92.0", "first_name": "Ariel", "last_name": "Minor", "email": "aminor2j@blogger.com", "gender": "Polygender", "ip_address": "29.0.88.144", "dob": "2020-07-16"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "93.0", "first_name": "Kenna", "last_name": "Spraggon", "email": "kspraggon2k@google.fr", "gender": "Agender", "ip_address": "139.245.147.77", "dob": "2020-11-15"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "94.0", "first_name": "Evelyn", "last_name": "Oleshunin", "email": "eoleshunin2l@istockphoto.com", "gender": "Genderqueer", "ip_address": "26.117.119.59", "dob": "2020-08-11"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "95.0", "first_name": "Marcel", "last_name": "Kuhnt", "email": "mkuhnt2m@google.com.au", "gender": "Genderfluid", "ip_address": "84.158.205.130", "dob": "2020-08-22"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "96.0", "first_name": "Wendye", "last_name": "Wigelsworth", "email": "wwigelsworth2n@webs.com", "gender": "Polygender", "ip_address": "241.71.79.173", "dob": "2020-02-26"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "97.0", "first_name": "Nonie", "last_name": "Cadany", "email": "ncadany2o@cdbaby.com", "gender": "Female", "ip_address": "87.132.223.229", "dob": "2020-05-30"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "98.0", "first_name": "Arthur", "last_name": "Norsister", "email": "anorsister2p@csmonitor.com", "gender": "Male", "ip_address": "21.50.95.6", "dob": "2020-05-13"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "99.0", "first_name": "Auria", "last_name": "Haryngton", "email": "aharyngton2q@mapquest.com", "gender": "Non-binary", "ip_address": "246.28.159.95", "dob": "2020-06-28"}, "emitted_at": 1649842201000} +{"stream": "aws_s3_sample", "data": {"id": "100.0", "first_name": "Phelia", "last_name": "Simmig", "email": "psimmig2r@example.com", "gender": "Agender", "ip_address": "205.35.103.161", "dob": "2020-04-05"}, "emitted_at": 1649842201000} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py new file mode 100644 index 000000000000..570849225937 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/sheet.py @@ -0,0 +1,92 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+#
+
+import logging
+from functools import cached_property
+from typing import Any, Dict, Iterable, Mapping, Optional, Tuple
+
+import smartsheet
+
+
+class SmartSheetAPIWrapper:
+    def __init__(self, config: Mapping[str, Any]):
+        self._spreadsheet_id = config["spreadsheet_id"]
+        self._access_token = config["access_token"]
+        api_client = smartsheet.Smartsheet(self._access_token)
+        api_client.errors_as_exceptions(True)
+        # each access to `Sheets` creates a new instance, so we save the bound method once instead of creating new objects
+        self._get_sheet = api_client.Sheets.get_sheet
+        self._data = None
+
+    def _fetch_sheet(self, from_dt: Optional[str] = None) -> None:
+        # without a cursor value we only need sheet metadata (name, columns), so a single row is enough
+        kwargs = {"rows_modified_since": from_dt}
+        if not from_dt:
+            kwargs["page_size"] = 1
+        self._data = self._get_sheet(self._spreadsheet_id, **kwargs)
+
+    @staticmethod
+    def _column_to_property(column_type: str) -> Dict[str, Any]:
+        type_mapping = {
+            "TEXT_NUMBER": {"type": "string"},
+            "DATE": {"type": "string", "format": "date"},
+            "DATETIME": {"type": "string", "format": "date-time"},
+        }
+        return type_mapping.get(column_type, {"type": "string"})
+
+    def _construct_record(self, row: smartsheet.models.Row) -> Dict[str, str]:
+        # values are converted to strings, as string is the only type declared in the schema
+        values_column_map = {cell.column_id: str(cell.value or "") for cell in row.cells}
+        record = {column.title: values_column_map[column.id] for column in self.data.columns}
+        record["modifiedAt"] = row.modified_at.isoformat()  # add the cursor value
+        return record
+
+    @property
+    def data(self) -> smartsheet.models.Sheet:
+        if not self._data:
+            self._fetch_sheet()
+        return self._data
+
+    @property
+    def name(self) -> str:
+        return self.data.name
+
+    @property
+    def row_count(self) -> int:
+        return len(self.data.rows)
+
+    @cached_property
+    def primary_key(self) -> str:
+        for column in self.data.columns:
+            if column.primary:
+                return column.title
+
+    @cached_property
+    def json_schema(self) -> Dict[str, Any]:
+        column_info = {column.title: self._column_to_property(column.type.value) for column in self.data.columns}
+        column_info["modifiedAt"] = {"type": "string", "format": "date-time"}  # add cursor field explicitly
+        json_schema = {
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "type": "object",
+            "properties": column_info,
+        }
+        return json_schema
+
+    def read_records(self, from_dt: str) -> Iterable[Dict[str, str]]:
+        self._fetch_sheet(from_dt)
+        for row in self.data.rows:
+            yield self._construct_record(row)
+
+    def check_connection(self, logger: logging.Logger) -> Tuple[bool, Optional[str]]:
+        try:
+            _ = self.data
+        except smartsheet.exceptions.ApiError as e:
+            err = e.error.result
+            code = 404 if err.code == 1006 else err.code
+            reason = f"{err.name}: {code} - {err.message} | Check your spreadsheet ID."
+            logger.error(reason)
+            return False, reason
+        except Exception as e:
+            reason = str(e)
+            logger.error(reason)
+            return False, reason
+        return True, None
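+
+# Usage sketch (illustrative only, not part of the connector's runtime path; the
+# config values are placeholders, everything else mirrors the class above):
+#
+#   wrapper = SmartSheetAPIWrapper({"spreadsheet_id": "<sheet id>", "access_token": "<token>"})
+#   ok, error = wrapper.check_connection(logging.getLogger("airbyte"))
+#   if ok:
+#       for record in wrapper.read_records(from_dt="2020-01-01T00:00:00+00:00"):
+#           print(record["modifiedAt"])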
 #
 
+import logging
+from typing import Any, List, Mapping, Tuple
 
-import json
-from datetime import datetime
-from typing import Dict, Generator, List
+from airbyte_cdk.sources import AbstractSource
+from airbyte_cdk.sources.streams import Stream
 
-import smartsheet
-from airbyte_cdk import AirbyteLogger
-from airbyte_cdk.models import (
-    AirbyteCatalog,
-    AirbyteConnectionStatus,
-    AirbyteMessage,
-    AirbyteRecordMessage,
-    AirbyteStream,
-    ConfiguredAirbyteCatalog,
-    Status,
-    Type,
-)
-from airbyte_cdk.sources import Source
+from .sheet import SmartSheetAPIWrapper
+from .streams import SmartsheetStream
 
 
-def get_prop(col_type: str) -> Dict[str, any]:
-    props = {
-        "TEXT_NUMBER": {"type": "string"},
-        "DATE": {"type": "string", "format": "date"},
-        "DATETIME": {"type": "string", "format": "date-time"},
-    }
-    return props.get(col_type, {"type": "string"})
+class SourceSmartsheets(AbstractSource):
+    def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Any]:
+        sheet = SmartSheetAPIWrapper(config)
+        return sheet.check_connection(logger)
 
-
-def construct_record(sheet_columns: List[Dict], row_cells: List[Dict]) -> Dict:
-    # convert all data to string as it is only expected format in schema
-    values_column_map = {cell["columnId"]: str(cell.get("value", "")) for cell in row_cells}
-    return {column["title"]: values_column_map[column["id"]] for column in sheet_columns}
-
-
-def get_json_schema(sheet_columns: List[Dict]) -> Dict:
-    column_info = {column["title"]: get_prop(column["type"]) for column in sheet_columns}
-    json_schema = {
-        "$schema": "http://json-schema.org/draft-07/schema#",
-        "type": "object",
-        "properties": column_info,
-    }
-    return json_schema
-
-
-class SourceSmartsheets(Source):
-    def check(self, logger: AirbyteLogger, config: json) -> AirbyteConnectionStatus:
-        try:
-            access_token = config["access_token"]
-            spreadsheet_id = config["spreadsheet_id"]
-
-            smartsheet_client = smartsheet.Smartsheet(access_token)
-            smartsheet_client.errors_as_exceptions(True)
-            smartsheet_client.Sheets.get_sheet(spreadsheet_id)
-
-            return AirbyteConnectionStatus(status=Status.SUCCEEDED)
-        except Exception as e:
-            if isinstance(e, smartsheet.exceptions.ApiError):
-                err = e.error.result
-                code = 404 if err.code == 1006 else err.code
-                reason = f"{err.name}: {code} - {err.message} | Check your spreadsheet ID."
- else: - reason = str(e) - logger.error(reason) - return AirbyteConnectionStatus(status=Status.FAILED) - - def discover(self, logger: AirbyteLogger, config: json) -> AirbyteCatalog: - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - streams = [] - - smartsheet_client = smartsheet.Smartsheet(access_token) - try: - sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id) - sheet = json.loads(str(sheet)) # make it subscriptable - sheet_json_schema = get_json_schema(sheet["columns"]) - logger.info(f"Running discovery on sheet: {sheet['name']} with {spreadsheet_id}") - - stream = AirbyteStream(name=sheet["name"], json_schema=sheet_json_schema) - stream.supported_sync_modes = ["full_refresh"] - streams.append(stream) - - except Exception as e: - raise Exception(f"Could not run discovery: {str(e)}") - - return AirbyteCatalog(streams=streams) - - def read( - self, logger: AirbyteLogger, config: json, catalog: ConfiguredAirbyteCatalog, state: Dict[str, any] - ) -> Generator[AirbyteMessage, None, None]: - - access_token = config["access_token"] - spreadsheet_id = config["spreadsheet_id"] - smartsheet_client = smartsheet.Smartsheet(access_token) - - for configured_stream in catalog.streams: - stream = configured_stream.stream - try: - sheet = smartsheet_client.Sheets.get_sheet(spreadsheet_id) - sheet = json.loads(str(sheet)) # make it subscriptable - logger.info(f"Starting syncing spreadsheet {sheet['name']}") - logger.info(f"Row count: {sheet['totalRowCount']}") - - for row in sheet["rows"]: - try: - record = construct_record(sheet["columns"], row["cells"]) - yield AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage(stream=stream.name, data=record, emitted_at=int(datetime.now().timestamp()) * 1000), - ) - except Exception as e: - logger.error(f"Unable to encode row into an AirbyteMessage with the following error: {e}") - - except Exception as e: - logger.error(f"Could not read smartsheet: {stream.name}") - raise e - logger.info(f"Finished syncing spreadsheet with ID: {spreadsheet_id}") + def streams(self, config: Mapping[str, Any]) -> List["Stream"]: + sheet = SmartSheetAPIWrapper(config) + return [SmartsheetStream(sheet, config)] diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json index 57876a9a81a1..93c5d422ea23 100644 --- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json @@ -17,6 +17,14 @@ "title": "Sheet ID", "description": "The spreadsheet ID. Find in the spreadsheet menu: File > Properties", "type": "string" + }, + "start_datetime": { + "title": "Start Datetime", + "type": "string", + "examples": ["2000-01-01T13:00:00", "2000-01-01T13:00:00-07:00"], + "description": "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`", + "format": "date-time", + "default": "2020-01-01T00:00:00+00:00" } } }, diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py new file mode 100644 index 000000000000..5a5893923760 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/streams.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import datetime +from typing import Any, Dict, Iterable, List, Mapping + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.streams import Stream +from source_smartsheets.sheet import SmartSheetAPIWrapper + + +class SmartsheetStream(Stream): + cursor_field = "modifiedAt" + + def __init__(self, smartsheet: SmartSheetAPIWrapper, config: Mapping[str, Any]): + self.smartsheet = smartsheet + self._state = {} + self._config = config + self._start_datetime = self._config.get("start_datetime") or "2020-01-01T00:00:00+00:00" + + @property + def primary_key(self) -> str: + return self.smartsheet.primary_key + + def get_json_schema(self) -> Dict[str, Any]: + return self.smartsheet.json_schema + + @property + def name(self) -> str: + return self.smartsheet.name + + @property + def state(self) -> Mapping[str, Any]: + if not self._state: + self._state = {self.cursor_field: self._start_datetime} + return self._state + + @state.setter + def state(self, value: Mapping[str, Any]): + self._state = value + + def read_records( + self, + sync_mode: SyncMode, + cursor_field: List[str] = None, + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[Mapping[str, Any]]: + def iso_dt(src): + return datetime.datetime.fromisoformat(src) + + for record in self.smartsheet.read_records(self.state[self.cursor_field]): + current_cursor_value = iso_dt(self.state[self.cursor_field]) + latest_cursor_value = iso_dt(record[self.cursor_field]) + new_cursor_value = max(latest_cursor_value, current_cursor_value) + self.state = {self.cursor_field: new_cursor_value.isoformat("T", "seconds")} + yield record diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py new file mode 100644 index 000000000000..e168f2fe831c --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/conftest.py @@ -0,0 +1,34 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import json +from pathlib import Path +from unittest.mock import Mock + +import pytest +from smartsheet.models import Sheet + +HERE = Path(__file__).parent.absolute() + + +@pytest.fixture +def response_mock(): + with open(HERE / "response.json") as json_file: + return json.loads(json_file.read()) + + +@pytest.fixture +def config(): + return {"spreadsheet_id": "id", "access_token": "token"} + + +@pytest.fixture +def get_sheet_mocker(mocker, response_mock): + def _mocker(api_wrapper, data=None): + sheet_obj = Sheet(props=response_mock, base_obj=api_wrapper) + get_sheet_mock = Mock(return_value=sheet_obj) + mocker.patch.object(api_wrapper, "_get_sheet", data or get_sheet_mock) + return get_sheet_mock, sheet_obj + + return _mocker diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json b/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json new file mode 100644 index 000000000000..99e8122ceef2 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/response.json @@ -0,0 +1,251 @@ +{ + "accessLevel": "OWNER", + "columns": [ + { + "id": 1101932201830276, + "index": 0, + "primary": true, + "title": "id", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 51 + }, + { + "id": 5605531829200772, + "index": 1, + "title": "first_name", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 275 + }, + { + "id": 3353732015515524, + "index": 2, + "title": "last_name", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 224 + }, + { + "id": 7857331642886020, + "index": 3, + "title": "email", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 231 + }, + { + "id": 2227832108672900, + "index": 4, + "options": [ + "Agender", + "Bigender", + "Female", + "Genderfluid", + "Genderqueer", + "Male", + "Non-binary", + "Polygender" + ], + "title": "gender", + "type": "PICKLIST", + "validation": false, + "version": 0, + "width": 193 + }, + { + "id": 6731431736043396, + "index": 5, + "title": "ip_address", + "type": "TEXT_NUMBER", + "validation": false, + "version": 0, + "width": 206 + }, + { + "id": 4479631922358148, + "index": 6, + "title": "dob", + "type": "DATE", + "validation": false, + "version": 0, + "width": 201 + } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "dependenciesEnabled": false, + "effectiveAttachmentOptions": [ + "GOOGLE_DRIVE", + "DROPBOX", + "ONEDRIVE", + "LINK", + "EVERNOTE", + "BOX_COM", + "FILE", + "EGNYTE" + ], + "ganttEnabled": false, + "hasSummaryFields": false, + "id": 679252988323716, + "modifiedAt": "2022-04-13T06:50:10+00:00", + "name": "aws_s3_sample", + "permalink": "https://app.smartsheet.com/sheets/v7vHw7qHJChcvfHQ8j3xJpG8H82Fh39Rc9PRGvQ1", + "resourceManagementEnabled": false, + "rows": [ + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "1", "value": 1.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Joni", + "value": "Joni" + }, + { + "columnId": 3353732015515524, + "displayValue": "Watling", + "value": "Watling" + }, + { + "columnId": 7857331642886020, + "displayValue": "jwatling0@amazonaws.com", + "value": "jwatling0@amazonaws.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Genderqueer", + "value": "Genderqueer" + }, + { + "columnId": 6731431736043396, + "displayValue": "195.50.216.194", + "value": "195.50.216.194" + }, + { "columnId": 4479631922358148, "value": "2020-11-23" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 
3201922565072772, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 1 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "2", "value": 2.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Bernardo", + "value": "Bernardo" + }, + { + "columnId": 3353732015515524, + "displayValue": "Klaaassen", + "value": "Klaaassen" + }, + { + "columnId": 7857331642886020, + "displayValue": "bklaaassen1@cbc.ca", + "value": "bklaaassen1@cbc.ca" + }, + { + "columnId": 2227832108672900, + "displayValue": "Polygender", + "value": "Polygender" + }, + { + "columnId": 6731431736043396, + "displayValue": "116.208.253.97", + "value": "116.208.253.97" + }, + { "columnId": 4479631922358148, "value": "2020-02-22" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 7705522192443268, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 2, + "siblingId": 3201922565072772 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "3", "value": 3.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Drake", + "value": "Drake" + }, + { + "columnId": 3353732015515524, + "displayValue": "Bednell", + "value": "Bednell" + }, + { + "columnId": 7857331642886020, + "displayValue": "dbednell2@theguardian.com", + "value": "dbednell2@theguardian.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Non-binary", + "value": "Non-binary" + }, + { + "columnId": 6731431736043396, + "displayValue": "120.15.24.132", + "value": "120.15.24.132" + }, + { "columnId": 4479631922358148, "value": "2020-08-21" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 2076022658230148, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 3, + "siblingId": 7705522192443268 + }, + { + "cells": [ + { "columnId": 1101932201830276, "displayValue": "4", "value": 4.0 }, + { + "columnId": 5605531829200772, + "displayValue": "Alfreda", + "value": "Alfreda" + }, + { + "columnId": 3353732015515524, + "displayValue": "Brumbye", + "value": "Brumbye" + }, + { + "columnId": 7857331642886020, + "displayValue": "abrumbye3@howstuffworks.com", + "value": "abrumbye3@howstuffworks.com" + }, + { + "columnId": 2227832108672900, + "displayValue": "Genderqueer", + "value": "Genderqueer" + }, + { + "columnId": 6731431736043396, + "displayValue": "64.22.217.122", + "value": "64.22.217.122" + }, + { "columnId": 4479631922358148, "value": "2020-12-29" } + ], + "createdAt": "2021-08-27T11:36:41+00:00", + "expanded": true, + "id": 6579622285600644, + "modifiedAt": "2021-08-27T11:36:41+00:00", + "rowNumber": 4, + "siblingId": 2076022658230148 + } + ], + "totalRowCount": 100, + "userPermissions": { "summaryPermissions": "ADMIN" }, + "userSettings": { "criticalPathEnabled": false, "displaySummaryTasks": true }, + "version": 9 +} diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py new file mode 100644 index 000000000000..662c7b24882b --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_sheets.py @@ -0,0 +1,119 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# + +import logging +from itertools import permutations +from unittest.mock import ANY, Mock + +import pytest +from smartsheet.exceptions import ApiError +from source_smartsheets.sheet import SmartSheetAPIWrapper + + +def test_fetch_sheet(config, get_sheet_mocker): + spreadsheet_id = config["spreadsheet_id"] + sheet = SmartSheetAPIWrapper(config) + mock, resp = get_sheet_mocker(sheet) + + sheet._fetch_sheet() + mock.assert_called_once_with(spreadsheet_id, rows_modified_since=None, page_size=1) + assert sheet.data == resp + + sheet._fetch_sheet(from_dt="2022-03-04T00:00:00Z") + mock.assert_called_with(spreadsheet_id, rows_modified_since="2022-03-04T00:00:00Z") + assert sheet.data == resp + + +def test_properties(config, get_sheet_mocker): + sheet = SmartSheetAPIWrapper(config) + _, resp = get_sheet_mocker(sheet) + assert sheet.data == resp + assert sheet.name == "aws_s3_sample" + assert sheet.row_count == 4 + assert sheet.primary_key == "id" + + +@pytest.mark.parametrize( + ("column_type", "expected_schema"), + ( + ("TEXT_NUMBER", {"type": "string"}), + ("DATE", {"type": "string", "format": "date"}), + ("DATETIME", {"type": "string", "format": "date-time"}), + ("DURATION", {"type": "string"}), + ), +) +def test_column_types(config, column_type, expected_schema): + sheet = SmartSheetAPIWrapper(config) + assert sheet._column_to_property(column_type) == expected_schema + + +def test_json_schema(config, get_sheet_mocker): + sheet = SmartSheetAPIWrapper(config) + _ = get_sheet_mocker(sheet) + json_schema = sheet.json_schema + assert json_schema["$schema"] == "http://json-schema.org/draft-07/schema#" + assert json_schema["type"] == "object" + assert "properties" in json_schema + assert "modifiedAt" in json_schema["properties"] + + +def _make_api_error(code, message, name): + result_mock = Mock(code=code, message=message) + result_mock.name = name + return ApiError(error=Mock(result=result_mock)) + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + ( + (Exception("Internal Server Error"), "Internal Server Error"), + ( + _make_api_error(code=1006, message="Resource not found", name="Not Found"), + "Not Found: 404 - Resource not found | Check your spreadsheet ID.", + ), + ( + _make_api_error(code=4003, message="Too many requests", name="Limit reached"), + "Limit reached: 4003 - Too many requests | Check your spreadsheet ID.", + ), + ), +) +def test_check_connection_fail(mocker, config, side_effect, expected_error): + sheet = SmartSheetAPIWrapper(config) + with mocker.patch.object(sheet, "_get_sheet", side_effect=side_effect): + status, error = sheet.check_connection(logger=logging.getLogger()) + assert error == expected_error + assert status is False + + +def test_check_connection_success(mocker, config): + sheet = SmartSheetAPIWrapper(config) + with mocker.patch.object(sheet, "_get_sheet"): + status, error = sheet.check_connection(logger=logging.getLogger()) + assert error is None + assert status is True + + +_columns = [ + Mock(id="1101932201830276", title="id", type="TEXT_NUMBER"), + Mock(id="5605531829200772", title="first_name", type="TEXT_NUMBER"), + Mock(id="3353732015515524", title="last_name", type="TEXT_NUMBER"), +] + + +_cells = [ + Mock(column_id="1101932201830276", value="11"), + Mock(column_id="5605531829200772", value="Leonardo"), + Mock(column_id="3353732015515524", value="Dicaprio"), +] + + +@pytest.mark.parametrize(("row", "columns"), (*((perm, _columns) for perm in permutations(_cells)), ([], _columns), ([], []))) +def 
test_different_cell_order_produces_same_result(get_sheet_mocker, config, row, columns): + sheet = SmartSheetAPIWrapper(config) + sheet_mock = Mock(rows=[Mock(cells=row)] if row else [], columns=columns) + get_sheet_mocker(sheet, data=Mock(return_value=sheet_mock)) + + records = sheet.read_records(from_dt="2020-01-01T00:00:00Z") + expected_records = [] if not row else [{"id": "11", "first_name": "Leonardo", "last_name": "Dicaprio", "modifiedAt": ANY}] + assert list(records) == expected_records diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py index 09bfcec10c48..4b71c4d0cedd 100644 --- a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_source.py @@ -2,45 +2,23 @@ # Copyright (c) 2021 Airbyte, Inc., all rights reserved. # -import json -from itertools import permutations +import logging from unittest.mock import Mock import pytest from source_smartsheets.source import SourceSmartsheets +from source_smartsheets.streams import SmartsheetStream -@pytest.fixture -def config(): - return {"access_token": "token", "spreadsheet_id": "id"} - - -@pytest.fixture(name="catalog") -def configured_catalog(): - stream_mock = Mock() - stream_mock.name = "test" # cannot be used in __init__ - return Mock(streams=[Mock(stream=stream_mock)]) - - -_columns = [ - {"id": "1101932201830276", "title": "id", "type": "TEXT_NUMBER"}, - {"id": "5605531829200772", "title": "first_name", "type": "TEXT_NUMBER"}, - {"id": "3353732015515524", "title": "last_name", "type": "TEXT_NUMBER"}, -] - - -_cells = [ - {"columnId": "1101932201830276", "value": "11"}, - {"columnId": "5605531829200772", "value": "Leonardo"}, - {"columnId": "3353732015515524", "value": "Dicaprio"}, -] +@pytest.mark.parametrize("connection_status", ((True, None), (False, "Internal Server Error"))) +def test_check_connection(mocker, config, connection_status): + mocker.patch("source_smartsheets.source.SmartSheetAPIWrapper.check_connection", Mock(return_value=connection_status)) + source = SourceSmartsheets() + assert source.check_connection(logger=logging.getLogger(), config=config) == connection_status -@pytest.mark.parametrize(("row", "columns"), (*((perm, _columns) for perm in permutations(_cells)), ([], _columns), ([], []))) -def test_different_cell_order_produces_one_result(mocker, config, catalog, row, columns): - sheet = json.dumps({"name": "test", "totalRowCount": 3, "columns": columns, "rows": [{"cells": row}] if row else []}) - mocker.patch("smartsheet.Smartsheet", Mock(return_value=Mock(Sheets=Mock(get_sheet=Mock(return_value=sheet))))) +def test_streams(config): source = SourceSmartsheets() - records = [message.record.data for message in source.read(logger=Mock(), config=config, catalog=catalog, state={})] - expected_records = [] if not row else [{"id": "11", "first_name": "Leonardo", "last_name": "Dicaprio"}] - assert list(records) == expected_records + streams_iter = iter(source.streams(config)) + assert type(next(streams_iter)) == SmartsheetStream + assert next(streams_iter, None) is None diff --git a/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py new file mode 100644 index 000000000000..eec95654047d --- /dev/null +++ b/airbyte-integrations/connectors/source-smartsheets/unit_tests/test_streams.py @@ -0,0 +1,21 @@ +# +# 
Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+import datetime
+from unittest.mock import Mock
+
+from airbyte_cdk.models import SyncMode
+from source_smartsheets.streams import SmartsheetStream
+
+
+def test_state_saved_after_each_record(config, get_sheet_mocker):
+    today_dt = datetime.datetime.now(datetime.timezone.utc)
+    before_yesterday = (today_dt - datetime.timedelta(days=2)).isoformat(timespec="seconds")
+    today = today_dt.isoformat(timespec="seconds")
+    record = {"id": "1", "name": "Georgio", "last_name": "Armani", "modifiedAt": today}
+    stream = SmartsheetStream(Mock(read_records=Mock(return_value=[record])), config)
+    stream.state = {stream.cursor_field: before_yesterday}
+    for emitted_record in stream.read_records(SyncMode.incremental):
+        assert emitted_record == record
+    assert stream.state == {stream.cursor_field: today}
diff --git a/docs/integrations/sources/smartsheets.md b/docs/integrations/sources/smartsheets.md
index afdf7682f02d..595e348d673d 100644
--- a/docs/integrations/sources/smartsheets.md
+++ b/docs/integrations/sources/smartsheets.md
@@ -86,7 +86,8 @@ To setup your new Smartsheets source, Airbyte will need:
 
 ## Changelog
 
-| Version | Date       | Pull Request                                              | Subject                   |
-|:--------|:-----------|:----------------------------------------------------------|:--------------------------|
-| 0.1.9   | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911)  | Bugfix: scrambled columns |
-| 0.1.8   | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792)    | Added oauth support       |
+| Version | Date       | Pull Request                                              | Subject                                                    |
+|:--------|:-----------|:----------------------------------------------------------|:-----------------------------------------------------------|
+| 0.1.10  | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077)  | Implement incremental read and improve code test coverage  |
+| 0.1.9   | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911)  | Bugfix: scrambled columns                                  |
+| 0.1.8   | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792)    | Added oauth support                                        |
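
For anyone who wants to exercise the new incremental path outside the unit tests, a few lines are enough. The snippet below is a minimal sketch, not part of the patch: the access token and sheet ID are placeholders, and it assumes the connector package and its dependencies are installed locally.

    # Minimal sketch of the incremental read path (placeholder credentials).
    import logging

    from airbyte_cdk.models import SyncMode
    from source_smartsheets.sheet import SmartSheetAPIWrapper
    from source_smartsheets.streams import SmartsheetStream

    config = {
        "access_token": "<access token>",   # placeholder
        "spreadsheet_id": "<sheet id>",     # placeholder
        "start_datetime": "2020-01-01T00:00:00+00:00",
    }

    sheet = SmartSheetAPIWrapper(config)
    ok, error = sheet.check_connection(logging.getLogger("airbyte"))
    assert ok, error

    stream = SmartsheetStream(sheet, config)
    for record in stream.read_records(SyncMode.incremental):
        # After each record the stream state holds the max "modifiedAt" seen so
        # far, so an interrupted sync resumes from the last emitted cursor value.
        print(record, stream.state)

The cursor starts from `start_datetime` (or the 2020-01-01 default), and only rows modified since that value are requested from the API via `rows_modified_since`, which is what keeps repeated syncs incremental.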
From a33cbf6b186b32e03548bc82f7cb715b68b87f88 Mon Sep 17 00:00:00 2001
From: Edmundo Ruiz Ghanem <168664+edmundito@users.noreply.github.com>
Date: Wed, 27 Apr 2022 14:57:12 -0400
Subject: [PATCH 30/39] Update empty states for connections, sources, and
 destinations (#12320)

* Update empty connections page with new empty state

* Add EmptyListPage component

* Update src/dest/conn CTAs to capitalize the word New

* Update empty states for sources and destinations pages

* Rename EmptyListPage -> EmptyResourceListPage component
Add data-id to EmptyListResourcePage button

* Update EmptyResourceList prop from enable create button to disable

* EmptyResourceListPage -> EmptyResourceListState

* EmptyResourceListState -> EmptyResourceListView
---
 airbyte-webapp/public/images/bowtie-half.svg        |  31 ++++++
 .../images/octavia/empty-connections.png            | Bin 0 -> 18118 bytes
 .../images/octavia/empty-destinations.png           | Bin 0 -> 17703 bytes
 .../public/images/octavia/empty-sources.png         | Bin 0 -> 17554 bytes
 .../EmptyResourceListView/EmptyResourceListView.tsx |  96 ++++++++++++++++
 .../components/EmptyResourceListView/index.ts       |   1 +
 .../src/components/base/Button/Button.tsx           |   8 +-
 .../components/base/Button/LoadingButton.tsx        |   8 +-
 .../src/components/base/Button/types.tsx            |   4 +-
 airbyte-webapp/src/locales/en.json                  |   9 +-
 .../AllConnectionsPage/AllConnectionsPage.tsx       |  46 +++++---
 .../AllDestinationsPage.tsx                         |  14 ++-
 .../pages/AllSourcesPage/AllSourcesPage.tsx         |   8 +-
 .../Sections/auth/GoogleAuthButton.tsx              |   4 +-
 14 files changed, 182 insertions(+), 47 deletions(-)
 create mode 100644 airbyte-webapp/public/images/bowtie-half.svg
 create mode 100644 airbyte-webapp/public/images/octavia/empty-connections.png
 create mode 100644 airbyte-webapp/public/images/octavia/empty-destinations.png
 create mode 100644 airbyte-webapp/public/images/octavia/empty-sources.png
 create mode 100644 airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx
 create mode 100644 airbyte-webapp/src/components/EmptyResourceListView/index.ts

diff --git a/airbyte-webapp/public/images/bowtie-half.svg b/airbyte-webapp/public/images/bowtie-half.svg
new file mode 100644
index 000000000000..48372107abbd
--- /dev/null
+++ b/airbyte-webapp/public/images/bowtie-half.svg
@@ -0,0 +1,31 @@
[SVG markup not recoverable from this copy of the patch; 31 added lines omitted]

diff --git a/airbyte-webapp/public/images/octavia/empty-connections.png b/airbyte-webapp/public/images/octavia/empty-connections.png
new file mode 100644
index 0000000000000000000000000000000000000000..e40446edb42d8bb79278e9a2e24bbcabcae1e849
GIT binary patch
[literal 18118 — binary image data omitted]

diff --git a/airbyte-webapp/public/images/octavia/empty-destinations.png b/airbyte-webapp/public/images/octavia/empty-destinations.png
new file mode 100644
index 0000000000000000000000000000000000000000..c985d0821aded07ff8ac83ca76b48d996ddca749
GIT binary patch
[literal 17703 — binary image data omitted]

diff --git a/airbyte-webapp/public/images/octavia/empty-sources.png b/airbyte-webapp/public/images/octavia/empty-sources.png
new file mode 100644
index 0000000000000000000000000000000000000000..460ea9be38875601fd0435365a6aa271b5793e2d
GIT binary patch
[literal 17554 — binary image data omitted; patch truncated here]
zAI7OpCel(m+awx^aL&|%E|DzG;f`D+7CQn#;7#bPWkr~zEVJ@-u?n;0{`q=G-I#bS z`MmR;LxFQ%P`Mp1kc*{GUAW?Obbpj$qTYH!(vB$y>tx=@gw2#XqbfOgn4`RKhgGgGh+v`QtzWxlWgjRx?nHdQKgQ6PFYNfirL@F&h^{G&H)5(pFqMLo~ zeaAxhG1Fy8UE${DgHOmdpiPYYT+AHIoHE6~U*VS|E6HOU3@XN$<~06nbR{ldkNX?& zuMoDY$&l+Qiuvw!SHpM^R&lv#IOI|pSAsW|uFP#hzQuj!$XwYg{h18i_z(y;6JWghRTa&+l`txJeQU>`?A}&S@3Z6#^91< zd|{HmLddBGf6BA&rmpn#wR^)#ay%28=@}%UT&L;Z&m6K#W%rVV2n56G0pyd%X~!gF z6gtISi9E^5$$itqaD2Hvl;mm0^UWOhCuT{-Y`ks^YrX&JcDccEH3R5Qvl-U}EsfS3 z9<@GCI=wuSWLhp|Tjekxz(s$Bt%L0$Ax4+blP3mmW0*|#2)2qi&8YXxapP?9jgXh# zypX^5y*vLbP{BoCTHgBWm8YpEMlz;^ait@MBOa5+Xi~C$aq|ra`qtAK-(6lkAvDwo zu;De{EYds6G44w|+`0~@jLb>R$A%NIXl>U-ixJ@af@l*N2u8c%=W%5yh0P3R zZ>H&UeCg)?Q(yI}hLK+W8hj4n@H|G?Ha?qhn!p*B+>Q=St%rp-nVObK&L%pZ=XU{8 znC{f_HgNGMSUGJdU6!W%j{ZUNI#?y-v!H+$hi-v{6p>yu4}NDCK-tlEcba24=V-`` z7qT2q8NfA0HANm@lvh^}Aoxpad;Nk59xoE8X4cSZHe1@E4A_*?FuS|Q7>rM17%}^S zS?K7C0apvEk6urPH6;yAw7h*T(i+yoT5(NZss}Kex)|WC zo^-KD$W02QXoSI`8bl4aM=ZD=Aj;rL>DB#R`fp`T;X(|!c%>ugZJ-31>%g};E{9ZYR}vS%uv+>lbVlu@C-v9zi!`PL%1 zt0dIHXEXMWc%SYMF*A}m=v6;bzpT&z zk{C5D1_jv>MlNJgE0Lhj#0Ea+Z#Nqh_G~Wvc8+tv@a($&zpZ7|@rQ?tSuvIE|Lm~` zgN2fi7sf3QHHcUT$Mm*6#cba4%!k_0 zgH7riXJMK|$Fa~8w7)R8m3T z>fbZ?c|bQL1`$FnydFxkIBL@jFga5Cz*z_!&9JU(;ueHyE9!>hGa5Y#J)e_Ew4gIx z5k)KuSJyc&=jlN>Oh2#G`Fg?QAk;GgS);dQEih_Bw59$I{Bo!!*_}naI9sEpW?=kq zi#S)HY^>iCqc)lgNm}b+G%>g-#r`RA;>r9&Ca1%7m0P{(OJ>`6^#WJiNT&`)TV^zX zvtF~WA1Aa5VMzFK#0sHW%s&Kjtwp?)_0FMF(F;3J*`RR%>TX!`xz3^>C9{EyR=ax#G$7CNeu>UO63!sv#$A!Qu^ z@_c!uKDZygUdT_ahcEzgs?7M~o)sQmjHo)AF42&`#xV%>L`KzsiWW9UsYY|@8~3Hy_&%eEa}fGngAP{4TA!D zD}B}gwD(}9Ic75_*x(zs?!Y=C7MlB=j6 zq>7BPIZjJT9^TLBTI`2{kh9WOW&zWqd(4^k#X3 zSyCgrYwK{?PZT8y$@}^^ujuX>9=<(1Xo0<+s=bx+ekmROyOgGMT782kc_ojKWM2d2 zSDJ8rBuRWopytNui$WJwe3*1qiqD45Fb5EP5*F}+L4~l;n=-wuj)gU))LIkc4CTLel`6LGV3`koBQ!#!g&_ez zd42NX4!=LL+=q=!1p#Sd$AIrm`Z9d3{DkGGr+97?wYCY1>A-}(M1KK<$2G^My^rAM zAfwCc4rAde=i#I2{{o_*FsxhzM>KE z)KlE9%nuCdT^?;RZG>QzK)rv=$s>yA8-)^HJS=A<&Nj@M|GGxZdNhg4$(A&@vZxS- zIa5?j+^-%4>>m&U(&Xd?KKEFS;pQ4iRgjcP3MoTFh6qcKfL`B%^U*l9c>aCOW#6ag z^Smit9kY2b7$;IvPxpsNi6uCmaTv~i(gW7XYe5hoPu@Iv=nrYJF;{U6Om4N>Ll!6# zXGLbYrAF1!4 z>9|}*m|r;T1!Z9|lKKk7=*5MM9SY~!JDjdq$%!VPaze@RM6n|nyzfa^{W)?==)$g$YDto$U!C`K(SgZjCRg<26zOS&@#(0V(z@y>zqY&zl~of-pQBtL!Eeuz z)_3QN{ux(Gf*`F2VMIvmO~*Pg$AK3y^<_nDQe4}KVlK4Qp8R|v`xdK6=45)Ek zxu~!UvqDXlh{ys$TZfm?VT?y%I}@Xt!AQhF3(DfE*+ok^PLm>oV;*0T>*?NsR^x?o zrsO)Q$^Dd4O!%<=*p)eq)pWqIY1a3lS1@w#KmMIk%;gUv*fWRTo^j))(PEQ|_-?Y2 zl}r$Fbt}84z7>Sx!a^___D?h|_)oQr=}AalXzrTYHB26zkv?Wr-w+m^^{6bb6`3E; z7lhMA3+Sg*AsN?L>HfdwmUOtSd+8O8^^M*-*4?7AtvXDrz_qp4r}Msr^6j!YJJe=P z2Z14zqHDl0G4-GS)-~!u5)*K@3`Y#NeJ=iVy#(Ff{?X?ASNPaG4`WKVPK&Ir_E!tx zc`eEPncU%Htl;+k7NvN3-2p;CaTSoCYi=EUUFZ%Br}Bafj+*!>D<)T*`hwX`EfTpNH633bERoHv;nlQMX29N7YVbLn6c zC8*7};G>W01{jBz>7T%LUf1l41Wno&CxG>zW*BhJ`yX1@QFd1)q@Ej!>mN;sLgLrXDn zf@PMMeyVfL9=14B=v;6>w)7{u{&^m)BixVtPgdRRfZ&Vwx2M1A_!7;qTsf?bB62)V zFP2El8>yz-riqppa?(mBairD5Csw9~MvvxT6{>>DX{&4Y1b~Ft)i>ihYL6-wT8L_9 z%ymg?c6K?iH?iG0C|)rSFf6w|MR^Do@PT`M1U5mXn_f=c^fzu0@0ulz8eUT|M@^+r zZqlr0Izi77sK*P>G|XVb#jh2Vjfsc3)T@Ht6G}2ctrTz@7hpWf#4#F3?a>Yets|TH zY7|4cpTI6bBw-D{E_fIJB9zoodtWFEVf2nxys-JN-14OH3H?z)1RV?L%Br3!`fA#n zD>PD5nM+=Pc6uO}a7=uZhMERzg}V^A+& zcRMof??_u#?qHA6j{e~F3nCsPkj`c-yI(CJa7=iio;U&kQ&3@Am@t{)m%{BZox(z~ znmR`R+@F&Dz>#{(442NovVI3CISH%Y@@ADSgcjKPR75_b3yVy%!F;nfr zw(>OSK>j7Cr~{_3bIxs7ReXp-WhDl3s)8pO>erJ<2mTz`sz&&oAE%_noRL;EPdXsV zvb2!%48-gd+_gw$S==u;ped5bet1NcQYXI6P)9p#R%Mp2VUJVfQa-!5S|-(_OJu%X z+Qi8q-L?=2rImH$p=c&JQUJghLmcKK0Zd{-A`|GZ#vII2wO_LmHrTcx1f z2Tii@bc4R*A|W!L)#(RN9690mZuLi?Kz3juK?OvYLaDc0Wpa+5)p%|rANjQY+ERd-S;or=DH6 
zx0wNfW$%oJhdw1so7^IDADsg_VsiWS%7`ik@*bwg=X}mf^Tl!}k1Hs#?AGyFZ@m%x=vUJ&~Me&;F^zd!`cfBBn{&Pu&`??$ZZ_DTza8cY|X+U$_y zq!-e8))!dQVVMjfs`CUQ66oo3$J1 zKUiyzw7zUl$1iO;gH^qCPrUuvn@egiOI2l{qK|eCDMnu1KbYv*)e=E^FC8x;Yu#)b z98a@^N@S8gF+0cm_)q(cPuFt~3k)1$e-+QJ53^a7JoUYsYnwc8q-J8eLx?9Lii`pR z(E}uL$;1o0Gy2t(dj8L@RTDf9TB4gn*}cB2FS+lQ5{fN=qXS#Td6m8dyX|D0P6mrl z&x4um(ld%HAY@`uLj_AO&&$nt;GXO0onVE9bfbQ;M7)Fc0687Hcx<}e4aG-8LCLjMv!?9oRMi{uiRLw1K4YC-uvi?U%m zarU@%=cT$o^lKs0IYmJ8@TpRGW>+$qHOW^4J%Q^>va*9d7kNf$F-ZY|gUR3x;{vU% ziOS0|(^H*+$m4A~JJ8voy0PP5e7NyLH< zSxF=B9$%C7ri7&$Ap{k6P@#`*3t!xUfS8K+Z?E%Lrp=^W?e?<^+r=_Sha4@*2{PZ; zo{d4}V$LKZ>KgFS;>`t*q*N%0TL(vB#I|Z$PL|=Ky#QxjIjI;}xW4)6NA<-7i)}}G zbM+wg-9{CI!DCy5cqeXD>a_xE55(oq*)*K2tN3lAumtigD4BGbO#B|G3~j?3l_W090gm2vfY zw2}#JQwh$6=yvSZZI#%~Q-91uGDy}VLq-?BSK4J`CiyU2WSXez4yZOPh6 z&ZX-S!n$}6CDmAS4NGO`uk-?+^ysnaiuoucKG74-+aML6l4aeZ38a!yN_2&Y#`u?S z30bk8n?l*%dEnfLO+`5s(r3vZh}yBT-=mQ;vOhtJgFZlnnCd(Q1_DC#{9w*@$5vcj zVq#mkSV!~X-Tx>qus^4E#>P&;6svmtA{b1J;!@RPP~?Ex^Rx6a+mZ>A02XaxrZb;G zNj}?P*NWYQpH(%lS1ujXep(-@vpY4}2MXe#rzEa&!9RWkAX7s0Wl9so?8GpYF83H3 zT7;s?5BICg96k!YgN_uZIO-Kmi>O#so0nqM+AicDCN>I!8e&&XmOqj~aX9YkJ$2Ug z-Z2CX(waB-xa+}=a>jR89yDPD4TAjx9(%|ltuNllcIZ)Fc97F5jm zORdA-V~h;SkZ3@9f0bxJbG~8Fo>!GZPH~y4FUA^frYHLxpY`sl4{T&k05!?NO$_ zjYKTY@Vcw_>;eiJT6m`i&w^*7e_ z&J&om=>IgB{P~ZLwhDi~20SS_hdOTnaG$mp{EKP<8D0I;TnO%L6g9#N_LvlK6NAlT z)E&S20FSSRyVe>~0E(I4$$xZ~D2s~Sk*M>9?4q#ls(Pl^T^NWsG<2!cId$c1*W%sY zONGI#VFh2`f~iQ|dSPOAF<~h1GU~3AvGHze)knwlSa4IUd&`->%oD7zug4$0k2^fn zWXKm*1WVI}R5Ua*B5sQD!r~_3daY1~ziFl6%S$r0ZEUfShY~P~0;GrZa z&k9E~=~<)~u@&$Mi9kFTkM^IEZ^{58lQY2VgWsL;zdA+t;-zB7Rb`r(uTSw$s2JziQAou3;SB==0aj#VCJz5GRw{)gQDRZt2vh`+V(h1Kl=(z4G9r);a>H z{R@=HKbD@;9%ae90ZddFPsq>l`|+5)YVz(-6=|`UD@8S1+F41l@}EyHHuR)6H?=RP z43CQ&c?ns0frrh!qBpDP5_~@UhAeRNqmY;&RNjklP-0}X5OcT5Zd{|$LO=5pNbwE8w^FXr-tW=O5BS3pZzaQct16*m4d~~y`ynKmg(8fgH1y(KwuXAq*n5itU+7& z_lEP6KUqy3zO$}{cwFD;=L&^y5cWKJ+;wB<@aolkoY2@bkCCdR=8dDntpiW!{cO~^ z>Aw&Pf{p|5AQI%%w7n&{XS?lYl#Dp?%oJb6q}mWK6COoH_Pv4OpnD?nGEW!2+(rTX zI^$xqp_N*Q6)BCHga;O5--|^Xa{OL9X2C3H#8KuZxq~2vkNu_gC-e*E%GA?^V1=7} zl_+1wJ9A0DQ8+fR`QM+#JrC^@(VKP^cydy2hAkadZ&amWtd>?gLC9UN58bUbe-|_D zrC{uXD!^k!)pfO+inU z`f;F0?`q>8n?x!#0~7s$zmd{iM&p(&lK0^c(Ta`6D}O!2jdbRD_uGPjhJV_Aw2#z- zRtqpgJ-P=IASXT7X-!1)YG0smiaB2@v|N~YqUect!*R4>SjHF@5c|6q2}e}qF+1ay;cY=Irv*~ih(i1hX{P+{m=A%397cgl%j!clk944s!-3k1 ztI;l@nlOu;Y#GS(Uy|k@7wzdVhx@77ws-)>6Y#}2^wba>h#2vu@{#N+{Nf|8jgsPM z)j1{)6ZLLxm4Hb;dRgtzo^j*tiIbUCT85Pa8nLmIgao|nwHHCEyj~7K=Ml5nj#<{a z$&2H8NHS@);W#sALk1a-2M>H7{LGsZ+F+fBx!Y7N=$jY+54Dcf{e%Rbc$io!OtY~I1unwgI&9iZIfqxgt5<^O^W2u?H9T~9E6R~#`!km99r zY2IawI(%l^Ti%hrB}EpJP{M^t9?2J0cL7!T`a;V6>qW!A?kR_P<@;Y-e_{Bi4;GBP(qb>K zY>#Kp9<6-yGCH<>-?~XQ*HglX1AR_67@qnCB%GclyY}Ps zh}Z**OcTJko1kD1BS;p@vn?&Z6UIuik#~MP%u^@`sNK z_iTUU`X~SH{?tuI7N_Foo;dZu%S%34UFwx98+2=_@{K*K)lAfH*MFD! zb7L1*Vg+-KfVZFN_2pYQ11Bh{<+Ym3%H@=)ShjBQ8l}U`K{pPoo?-g8FY(1CMaUmj3v(>!Z{691fK^w;hiC_3r4D;uXJcwc~ZDkfF7?pm*54rb9`GcVu|3+Ei zwEfYpb{;vCN2SR{Z+NQKmE6Z`q2EAHH7u%AOlJ!z=tt%q2CQSX05D zt(jZSGs?1%>5p1QJA*&X!eesm5<9_z4`j4h~H=Lq&a(3bmDD&D^9Ir-0^j{ zR(z)_FvCaAf2MJE^TP!X`hsSMDPP!l*JVq;B)`8=dmnB?YcYUJwlD%#TTNR}EmdRN#MYujp zjl3dgDyuiq(@SO3?X0^owp&yx-$repA78w7dinv=pSfy5A~FnpFU~!#mg4qYF6+3G zd&$Ibo7ydgYwYT5l{hE0lpbQ5u}9`;|L#|Jic3{Gib^yTh-a+hi> z0`sJPI#2BlI3F*xXWhwmkCw>-dz2`d8uQqTVnFwiQ$@oLgrqjI$U3! 
zCr#X=5&P(=iIKN?dQcNI`JA`_B_9+t?$5)E1^a_!elQ#F@qE zcgTwG$-!$rcjR{c)sXOd6yxf3>F-qc5ZTZtmc*R3LBw$!FeP^ie$+1wdD(8*)cXl| Pgb0JDtDnm{r-UW|n~N_X literal 0 HcmV?d00001 diff --git a/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx b/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx new file mode 100644 index 000000000000..b70f0d551806 --- /dev/null +++ b/airbyte-webapp/src/components/EmptyResourceListView/EmptyResourceListView.tsx @@ -0,0 +1,96 @@ +import { useMemo } from "react"; +import { FormattedMessage } from "react-intl"; +import styled from "styled-components"; + +import { Button, H2 } from "components/base"; + +interface EmptyResourceListViewProps { + resourceType: "connections" | "destinations" | "sources"; + onCreateClick: () => void; + disableCreateButton?: boolean; +} + +const Container = styled.div` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + height: 100%; +`; + +export const Heading = styled(H2)` + font-weight: 700; + font-size: 24px; + line-height: 29px; + max-width: 386px; + text-align: center; + strong { + color: ${({ theme }) => theme.redColor}; + } +`; + +const IllustrationContainer = styled(Container)` + position: relative; + width: 592px; + height: 276px; + + pointer-events: none; + user-select: none; +`; + +const OctaviaImg = styled.img` + max-height: 203px; + max-width: 100%; + z-index: 1; +`; + +export const BowtieImg = styled.img` + position: absolute; + + &.empty-list-bowtie--right { + right: 0; + transform: scaleX(-1); + } + + &.empty-list-bowtie--left { + left: 0; + } +`; + +export const EmptyResourceListView: React.FC = ({ + resourceType, + onCreateClick, + disableCreateButton, +}) => { + const { headingMessageId, buttonMessageId, singularResourceType } = useMemo(() => { + const singularResourceType = resourceType.substring(0, resourceType.length - 1); + const baseMessageId = resourceType === "connections" ? 
singularResourceType : resourceType; + + const headingMessageId = `${baseMessageId}.description`; + const buttonMessageId = `${baseMessageId}.new${ + singularResourceType.substring(0, 1).toUpperCase() + singularResourceType.substring(1) + }`; + + return { headingMessageId, buttonMessageId, singularResourceType }; + }, [resourceType]); + + return ( + + + + + + {resourceType !== "destinations" && ( + + )} + {resourceType !== "sources" && ( + + )} + + + + + ); +}; diff --git a/airbyte-webapp/src/components/EmptyResourceListView/index.ts b/airbyte-webapp/src/components/EmptyResourceListView/index.ts new file mode 100644 index 000000000000..585fb92a64ce --- /dev/null +++ b/airbyte-webapp/src/components/EmptyResourceListView/index.ts @@ -0,0 +1 @@ +export * from "./EmptyResourceListView"; diff --git a/airbyte-webapp/src/components/base/Button/Button.tsx b/airbyte-webapp/src/components/base/Button/Button.tsx index fb3d2c16388e..3e304cce814f 100644 --- a/airbyte-webapp/src/components/base/Button/Button.tsx +++ b/airbyte-webapp/src/components/base/Button/Button.tsx @@ -1,9 +1,9 @@ import styled from "styled-components"; import { Theme } from "theme"; -import { IProps } from "./types"; +import { ButtonProps } from "./types"; -type IStyleProps = IProps & { theme: Theme }; +type IStyleProps = ButtonProps & { theme: Theme }; const getBorderColor = (props: IStyleProps) => { if ((props.secondary && props.wasActive) || props.iconOnly) { @@ -96,14 +96,14 @@ const getPadding = (props: IStyleProps) => { return "5px 16px"; }; -const Button = styled.button` +const Button = styled.button` width: ${(props) => (props.full ? "100%" : "auto")}; display: ${(props) => (props.full ? "block" : "inline-block")}; border: 1px solid ${(props) => getBorderColor(props)}; outline: none; border-radius: 4px; padding: ${(props) => getPadding(props)}; - font-weight: ${(props) => (props.size === "xl" ? 300 : 500)}; + font-weight: ${(props) => (props.size === "xl" ? 600 : 500)}; font-size: ${(props) => getFontSize(props)}px; /* TODO: should try to get rid of line-height altogether */ line-height: ${(props) => (props.size === "xl" ? 
"initial" : "15px")}; diff --git a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx index db3250d2fe55..2c621077d5c4 100644 --- a/airbyte-webapp/src/components/base/Button/LoadingButton.tsx +++ b/airbyte-webapp/src/components/base/Button/LoadingButton.tsx @@ -4,7 +4,7 @@ import React from "react"; import styled, { keyframes } from "styled-components"; import Button from "./Button"; -import { IProps } from "./types"; +import { ButtonProps } from "./types"; export const SpinAnimation = keyframes` 0% { @@ -15,7 +15,7 @@ export const SpinAnimation = keyframes` } `; -const SymbolSpinner = styled(FontAwesomeIcon)` +const SymbolSpinner = styled(FontAwesomeIcon)` display: inline-block; font-size: 18px; position: absolute; @@ -25,7 +25,7 @@ const SymbolSpinner = styled(FontAwesomeIcon)` margin: -1px 0 -3px -9px; `; -const ButtonView = styled(Button)` +const ButtonView = styled(Button)` pointer-events: none; background: ${({ theme }) => theme.primaryColor25}; border-color: transparent; @@ -36,7 +36,7 @@ const Invisible = styled.div` color: rgba(255, 255, 255, 0); `; -const LoadingButton: React.FC = (props) => { +const LoadingButton: React.FC = (props) => { if (props.isLoading) { return ( diff --git a/airbyte-webapp/src/components/base/Button/types.tsx b/airbyte-webapp/src/components/base/Button/types.tsx index 39d9464de28a..63abaa5049eb 100644 --- a/airbyte-webapp/src/components/base/Button/types.tsx +++ b/airbyte-webapp/src/components/base/Button/types.tsx @@ -1,4 +1,4 @@ -export type IProps = { +export interface ButtonProps extends React.ButtonHTMLAttributes { full?: boolean; danger?: boolean; secondary?: boolean; @@ -7,4 +7,4 @@ export type IProps = { wasActive?: boolean; clickable?: boolean; size?: "m" | "xl"; -} & React.ButtonHTMLAttributes; +} diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 2c1378b5b4ad..7e6de2b13dae 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -232,12 +232,13 @@ "onboarding.syncFailed": "Your sync is failed. Please try again", "onboarding.startAgain": "Your sync was cancelled. You can start it again", + "sources.description": "Sources Sources are where you want to pull data from.", "sources.searchIncremental": "Search cursor value for incremental", "sources.incrementalDefault": "{value} (default)", "sources.incrementalSourceCursor": "Incremental - source-defined cursor", "sources.full_refresh": "Full refresh", "sources.incremental": "Incremental - based on...", - "sources.newSource": "+ new source", + "sources.newSource": "+ New source", "sources.newSourceTitle": "New Source", "sources.selectSource": "Select a source", "sources.status": "Status", @@ -291,7 +292,8 @@ "sources.lastAttempt": "Last attempt:", "destination.destinationSettings": "Destination Settings", - "destination.newDestination": "+ new destination", + "destinations.newDestination": "+ New destination", + "destinations.description": "Destinations are where you send or push your data to.", "destinations.noDestinations": "Destination list is empty", "destinations.noSources": "No sources yet", "destinations.addSourceReplicateData": "Add sources where to replicate data from.", @@ -309,6 +311,7 @@ "connection.warningUpdateSchema": "WARNING! 
Updating the schema will delete all the data for this connection in your destination and start syncing from scratch.", "connection.title": "Connection", + "connection.description": "Connections link Sources to Destinations.", "connection.fromTo": "{source} → {destination}", "connection.connectionSettings": "Connection settings", "connection.testsPassed": "All connection tests passed", @@ -321,7 +324,7 @@ "connection.resetData": "Reset your data", "connection.updateSchema": "Refresh source schema", "connection.updateSchemaText": "WARNING! Updating the schema will delete all the data for this connection in your destination and start syncing from scratch. Are you sure you want to do this?", - "connection.newConnection": "+ new connection", + "connection.newConnection": "+ New connection", "connection.newConnectionTitle": "New connection", "connection.noConnections": "Connection list is empty", "connection.disabledConnection": "Disabled connection", diff --git a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx index 03f365770065..3a402f14589b 100644 --- a/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx +++ b/airbyte-webapp/src/pages/ConnectionPage/pages/AllConnectionsPage/AllConnectionsPage.tsx @@ -2,8 +2,8 @@ import React, { Suspense } from "react"; import { FormattedMessage } from "react-intl"; import { Button, LoadingPage, MainPageWithScroll, PageTitle } from "components"; +import { EmptyResourceListView } from "components/EmptyResourceListView"; import HeadTitle from "components/HeadTitle"; -import Placeholder, { ResourceTypes } from "components/Placeholder"; import { FeatureItem, useFeatureService } from "hooks/services/Feature"; import { useConnectionList } from "hooks/services/useConnectionHook"; @@ -19,30 +19,34 @@ const AllConnectionsPage: React.FC = () => { const { hasFeature } = useFeatureService(); const allowCreateConnection = hasFeature(FeatureItem.AllowCreateConnection); - const onClick = () => push(`${RoutePaths.ConnectionNew}`); + const onCreateClick = () => push(`${RoutePaths.ConnectionNew}`); return ( - } - pageTitle={ - } - endComponent={ - + }> + {connections.length ? ( + } + pageTitle={ + } + endComponent={ + + } + /> } - /> - } - > - }> - {connections.length ? ( + > - ) : ( - - )} - - + + ) : ( + + )} + ); }; diff --git a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx index 63a2cd1c6ebc..f7bbd69a2487 100644 --- a/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx +++ b/airbyte-webapp/src/pages/DestinationPage/pages/AllDestinationsPage/AllDestinationsPage.tsx @@ -2,9 +2,9 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { Button, MainPageWithScroll } from "components"; +import { EmptyResourceListView } from "components/EmptyResourceListView"; import HeadTitle from "components/HeadTitle"; import PageTitle from "components/PageTitle"; -import Placeholder, { ResourceTypes } from "components/Placeholder"; import { useDestinationList } from "hooks/services/useDestinationHook"; import useRouter from "hooks/useRouter"; @@ -18,7 +18,7 @@ const AllDestinationsPage: React.FC = () => { const onCreateDestination = () => push(`${RoutePaths.DestinationNew}`); - return ( + return destinations.length ? 
( } pageTitle={ @@ -26,18 +26,16 @@ const AllDestinationsPage: React.FC = () => { title={} endComponent={ } /> } > - {destinations.length ? ( - - ) : ( - - )} + + ) : ( + ); }; diff --git a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx index 3e47d6f4068c..32d75a296dc0 100644 --- a/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx +++ b/airbyte-webapp/src/pages/SourcesPage/pages/AllSourcesPage/AllSourcesPage.tsx @@ -2,9 +2,9 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { Button, MainPageWithScroll } from "components"; +import { EmptyResourceListView } from "components/EmptyResourceListView"; import HeadTitle from "components/HeadTitle"; import PageTitle from "components/PageTitle"; -import Placeholder, { ResourceTypes } from "components/Placeholder"; import { useSourceList } from "hooks/services/useSourceHook"; import useRouter from "hooks/useRouter"; @@ -17,7 +17,7 @@ const AllSourcesPage: React.FC = () => { const { sources } = useSourceList(); const onCreateSource = () => push(`${RoutePaths.SourceNew}`); - return ( + return sources.length ? ( } pageTitle={ @@ -31,8 +31,10 @@ const AllSourcesPage: React.FC = () => { /> } > - {sources.length ? : } + + ) : ( + ); }; diff --git a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx index b23bd04d92a3..75efe97b826d 100644 --- a/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx +++ b/airbyte-webapp/src/views/Connector/ServiceForm/components/Sections/auth/GoogleAuthButton.tsx @@ -1,9 +1,9 @@ import React from "react"; import styled from "styled-components"; -import { IProps } from "components/base/Button/types"; +import { ButtonProps } from "components/base/Button/types"; -const StyledButton = styled.button` +const StyledButton = styled.button` align-items: center; background: #4285f4; border: 0 solid #4285f4; From 9ee1f791ac0fb5aab117869db8531c524e2b7736 Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Wed, 27 Apr 2022 17:59:59 -0300 Subject: [PATCH 31/39] Bump Airbyte version from 0.36.3-alpha to 0.36.4-alpha (#12415) Co-authored-by: terencecho --- .bumpversion.cfg | 2 +- .env | 2 +- airbyte-bootloader/Dockerfile | 2 +- airbyte-container-orchestrator/Dockerfile | 2 +- airbyte-metrics/reporter/Dockerfile | 2 +- airbyte-scheduler/app/Dockerfile | 2 +- airbyte-server/Dockerfile | 2 +- airbyte-webapp/package-lock.json | 4 ++-- airbyte-webapp/package.json | 2 +- airbyte-workers/Dockerfile | 2 +- charts/airbyte/Chart.yaml | 2 +- charts/airbyte/README.md | 10 +++++----- charts/airbyte/values.yaml | 10 +++++----- docs/operator-guides/upgrading-airbyte.md | 2 +- kube/overlays/stable-with-resource-limits/.env | 2 +- .../stable-with-resource-limits/kustomization.yaml | 12 ++++++------ kube/overlays/stable/.env | 2 +- kube/overlays/stable/kustomization.yaml | 12 ++++++------ octavia-cli/Dockerfile | 2 +- octavia-cli/README.md | 2 +- octavia-cli/install.sh | 2 +- octavia-cli/setup.py | 2 +- 22 files changed, 41 insertions(+), 41 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 34b2b23072f1..79d6dfa74499 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.36.3-alpha 
+current_version = 0.36.4-alpha commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? diff --git a/.env b/.env index 428dc71fdc9b..7ac8ef4ad13e 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.36.3-alpha +VERSION=0.36.4-alpha # When using the airbyte-db via default docker image CONFIG_ROOT=/data diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index e2753b0957d2..68acc05b4ce0 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 68ea502c2695..9fb12182b0f0 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index 024bbb22fd34..2e71cbe04746 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS metrics-reporter -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-metrics-reporter ENV VERSION ${VERSION} diff --git a/airbyte-scheduler/app/Dockerfile b/airbyte-scheduler/app/Dockerfile index fd8099512cf7..0337dd7a570d 100644 --- a/airbyte-scheduler/app/Dockerfile +++ b/airbyte-scheduler/app/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_VERSION=17.0.1 FROM openjdk:${JDK_VERSION}-slim AS scheduler -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-scheduler ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 8504029b1386..684de4acea24 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM openjdk:${JDK_VERSION}-slim AS server EXPOSE 8000 -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index e779bd55ab20..fd32fbc5bdff 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "dependencies": { "@fortawesome/fontawesome-svg-core": "^6.1.1", "@fortawesome/free-brands-svg-icons": "^6.1.1", diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index af16a467609a..25e460e3dd04 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.36.3-alpha", + "version": "0.36.4-alpha", "private": true, "engines": { "node": ">=16.0.0" diff --git a/airbyte-workers/Dockerfile b/airbyte-workers/Dockerfile index 3300fb0b083c..a6c42cec075e 100644 --- 
a/airbyte-workers/Dockerfile +++ b/airbyte-workers/Dockerfile @@ -25,7 +25,7 @@ RUN curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packa RUN echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list RUN apt-get update && apt-get install -y kubectl -ARG VERSION=0.36.3-alpha +ARG VERSION=0.36.4-alpha ENV APPLICATION airbyte-workers ENV VERSION ${VERSION} diff --git a/charts/airbyte/Chart.yaml b/charts/airbyte/Chart.yaml index 81a2129c32a9..66e31a7d1fa9 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -21,7 +21,7 @@ version: 0.3.1 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.36.3-alpha" +appVersion: "0.36.4-alpha" dependencies: - name: common diff --git a/charts/airbyte/README.md b/charts/airbyte/README.md index 2638d0a19dee..41d8fc7fa98e 100644 --- a/charts/airbyte/README.md +++ b/charts/airbyte/README.md @@ -31,7 +31,7 @@ Helm charts for Airbyte. | `webapp.replicaCount` | Number of webapp replicas | `1` | | `webapp.image.repository` | The repository to use for the airbyte webapp image. | `airbyte/webapp` | | `webapp.image.pullPolicy` | the pull policy to use for the airbyte webapp image | `IfNotPresent` | -| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `webapp.image.tag` | The airbyte webapp image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `webapp.podAnnotations` | Add extra annotations to the webapp pod(s) | `{}` | | `webapp.containerSecurityContext` | Security context for the container | `{}` | | `webapp.livenessProbe.enabled` | Enable livenessProbe on the webapp | `true` | @@ -73,7 +73,7 @@ Helm charts for Airbyte. | `scheduler.replicaCount` | Number of scheduler replicas | `1` | | `scheduler.image.repository` | The repository to use for the airbyte scheduler image. | `airbyte/scheduler` | | `scheduler.image.pullPolicy` | the pull policy to use for the airbyte scheduler image | `IfNotPresent` | -| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `scheduler.image.tag` | The airbyte scheduler image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `scheduler.podAnnotations` | Add extra annotations to the scheduler pod | `{}` | | `scheduler.resources.limits` | The resources limits for the scheduler container | `{}` | | `scheduler.resources.requests` | The requested resources for the scheduler container | `{}` | @@ -120,7 +120,7 @@ Helm charts for Airbyte. | `server.replicaCount` | Number of server replicas | `1` | | `server.image.repository` | The repository to use for the airbyte server image. | `airbyte/server` | | `server.image.pullPolicy` | the pull policy to use for the airbyte server image | `IfNotPresent` | -| `server.image.tag` | The airbyte server image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `server.image.tag` | The airbyte server image tag. 
Defaults to the chart's AppVersion | `0.36.4-alpha` | | `server.podAnnotations` | Add extra annotations to the server pod | `{}` | | `server.containerSecurityContext` | Security context for the container | `{}` | | `server.livenessProbe.enabled` | Enable livenessProbe on the server | `true` | @@ -158,7 +158,7 @@ Helm charts for Airbyte. | `worker.replicaCount` | Number of worker replicas | `1` | | `worker.image.repository` | The repository to use for the airbyte worker image. | `airbyte/worker` | | `worker.image.pullPolicy` | the pull policy to use for the airbyte worker image | `IfNotPresent` | -| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `worker.image.tag` | The airbyte worker image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | | `worker.podAnnotations` | Add extra annotations to the worker pod(s) | `{}` | | `worker.containerSecurityContext` | Security context for the container | `{}` | | `worker.livenessProbe.enabled` | Enable livenessProbe on the worker | `true` | @@ -190,7 +190,7 @@ Helm charts for Airbyte. | ----------------------------- | -------------------------------------------------------------------- | -------------------- | | `bootloader.image.repository` | The repository to use for the airbyte bootloader image. | `airbyte/bootloader` | | `bootloader.image.pullPolicy` | the pull policy to use for the airbyte bootloader image | `IfNotPresent` | -| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.3-alpha` | +| `bootloader.image.tag` | The airbyte bootloader image tag. Defaults to the chart's AppVersion | `0.36.4-alpha` | ### Temporal parameters diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 4a19a4c4b0a2..25a245a34229 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -43,7 +43,7 @@ webapp: image: repository: airbyte/webapp pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param webapp.podAnnotations [object] Add extra annotations to the webapp pod(s) ## @@ -209,7 +209,7 @@ scheduler: image: repository: airbyte/scheduler pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param scheduler.podAnnotations [object] Add extra annotations to the scheduler pod ## @@ -440,7 +440,7 @@ server: image: repository: airbyte/server pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param server.podAnnotations [object] Add extra annotations to the server pod ## @@ -581,7 +581,7 @@ worker: image: repository: airbyte/worker pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param worker.podAnnotations [object] Add extra annotations to the worker pod(s) ## @@ -699,7 +699,7 @@ bootloader: image: repository: airbyte/bootloader pullPolicy: IfNotPresent - tag: 0.36.3-alpha + tag: 0.36.4-alpha ## @param bootloader.podAnnotations [object] Add extra annotations to the bootloader pod ## diff --git a/docs/operator-guides/upgrading-airbyte.md b/docs/operator-guides/upgrading-airbyte.md index e58aa63432b0..585a53655ed3 100644 --- a/docs/operator-guides/upgrading-airbyte.md +++ b/docs/operator-guides/upgrading-airbyte.md @@ -103,7 +103,7 @@ If you are upgrading from (i.e. your current version of Airbyte is) Airbyte vers Here's an example of what it might look like with the values filled in. It assumes that the downloaded `airbyte_archive.tar.gz` is in `/tmp`. 
```bash - docker run --rm -v /tmp:/config airbyte/migration:0.36.3-alpha --\ + docker run --rm -v /tmp:/config airbyte/migration:0.36.4-alpha --\ --input /config/airbyte_archive.tar.gz\ --output /config/airbyte_archive_migrated.tar.gz ``` diff --git a/kube/overlays/stable-with-resource-limits/.env b/kube/overlays/stable-with-resource-limits/.env index 75b1cfdbfa75..2c4fabdad591 100644 --- a/kube/overlays/stable-with-resource-limits/.env +++ b/kube/overlays/stable-with-resource-limits/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.3-alpha +AIRBYTE_VERSION=0.36.4-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable-with-resource-limits/kustomization.yaml b/kube/overlays/stable-with-resource-limits/kustomization.yaml index 9de37822186a..4a40316bec6e 100644 --- a/kube/overlays/stable-with-resource-limits/kustomization.yaml +++ b/kube/overlays/stable-with-resource-limits/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/bootloader - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/scheduler - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/server - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/webapp - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/worker - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/kube/overlays/stable/.env b/kube/overlays/stable/.env index 3b82b057729c..89bb305385fa 100644 --- a/kube/overlays/stable/.env +++ b/kube/overlays/stable/.env @@ -1,4 +1,4 @@ -AIRBYTE_VERSION=0.36.3-alpha +AIRBYTE_VERSION=0.36.4-alpha # Airbyte Internal Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db DATABASE_HOST=airbyte-db-svc diff --git a/kube/overlays/stable/kustomization.yaml b/kube/overlays/stable/kustomization.yaml index 277f73114c8f..336bdd4f2f75 100644 --- a/kube/overlays/stable/kustomization.yaml +++ b/kube/overlays/stable/kustomization.yaml @@ -8,17 +8,17 @@ bases: images: - name: airbyte/db - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/bootloader - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/scheduler - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/server - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/webapp - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: airbyte/worker - newTag: 0.36.3-alpha + newTag: 0.36.4-alpha - name: temporalio/auto-setup newTag: 1.7.0 diff --git a/octavia-cli/Dockerfile b/octavia-cli/Dockerfile index 0172db4bf5b4..adba617012e5 100644 --- a/octavia-cli/Dockerfile +++ b/octavia-cli/Dockerfile @@ -14,5 +14,5 @@ USER octavia-cli WORKDIR /home/octavia-project ENTRYPOINT ["octavia"] -LABEL io.airbyte.version=0.36.3-alpha +LABEL io.airbyte.version=0.36.4-alpha LABEL io.airbyte.name=airbyte/octavia-cli diff --git a/octavia-cli/README.md b/octavia-cli/README.md index f1da22e32d3c..444e39503b3e 100644 --- a/octavia-cli/README.md +++ b/octavia-cli/README.md @@ -105,7 +105,7 @@ This script: ```bash touch ~/.octavia # Create a file to store env variables that will be mapped the octavia-cli container mkdir my_octavia_project_directory # Create your octavia project directory where YAML configurations will be stored. 
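# Note on the `docker run` command below: it mounts your octavia project
# directory into the container, shares the host network so octavia can reach
# the Airbyte API on localhost, runs as your user so the generated YAML files
# are not owned by root, and loads credentials from the ~/.octavia env file
# created above.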
-docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.3-alpha +docker run --name octavia-cli -i --rm -v my_octavia_project_directory:/home/octavia-project --network host --user $(id -u):$(id -g) --env-file ~/.octavia airbyte/octavia-cli:0.36.4-alpha ``` ### Using `docker-compose` diff --git a/octavia-cli/install.sh b/octavia-cli/install.sh index 4d6e1d2d1659..5f3802e7e046 100755 --- a/octavia-cli/install.sh +++ b/octavia-cli/install.sh @@ -3,7 +3,7 @@ # This install scripts currently only works for ZSH and Bash profiles. # It creates an octavia alias in your profile bound to a docker run command and your current user. -VERSION=0.36.3-alpha +VERSION=0.36.4-alpha OCTAVIA_ENV_FILE=${HOME}/.octavia detect_profile() { diff --git a/octavia-cli/setup.py b/octavia-cli/setup.py index 5e528cae231e..8c3418a48b36 100644 --- a/octavia-cli/setup.py +++ b/octavia-cli/setup.py @@ -15,7 +15,7 @@ setup( name="octavia-cli", - version="0.36.3", + version="0.36.4", description="A command line interface to manage Airbyte configurations", long_description=README, author="Airbyte", From 3a032d9276dc2021fecca5717caa61fc068b368f Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 28 Apr 2022 09:08:16 +0300 Subject: [PATCH 32/39] #10938 source GA to Beta: upd window_in_days description (#12385) --- .../source_google_analytics_v4/spec.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json index 871aef715e4f..07482cb89535 100644 --- a/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json +++ b/airbyte-integrations/connectors/source-google-analytics-v4/source_google_analytics_v4/spec.json @@ -22,8 +22,8 @@ }, "window_in_days": { "type": "integer", - "title": "Window in days (Optional)", - "description": "The amount of days each stream slice would consist of beginning from start_date. Bigger the value - faster the fetch. (Min=1, as for a Day; Max=364, as for a Year).", + "title": "Data request window (Optional)", + "description": "The amount of data batched by the number of days. The bigger the value, the bigger the batch size and the lower the API requests made. 
(Min=1, as for a Day; Max=364, as for a Year).", "examples": [30, 60, 90, 120, 200, 364], "default": 1 }, From 42a58b0273d7010fa0296fbd4410a82e5fe5802f Mon Sep 17 00:00:00 2001 From: Serhii Chvaliuk Date: Thu, 28 Apr 2022 09:36:08 +0300 Subject: [PATCH 33/39] Source Amazon Ads: Update fields in source-connectors specifications (#11730) * Update fields in source-connectors specifications Signed-off-by: Sergey Chvalyuk --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 73 ++++++++++--------- .../connectors/source-amazon-ads/Dockerfile | 2 +- .../integration_tests/spec.json | 62 ++++++++-------- .../source_amazon_ads/source.py | 2 +- .../source_amazon_ads/spec.py | 73 ++++++++++--------- .../streams/report_streams/products_report.py | 2 +- docs/integrations/sources/amazon-ads.md | 6 +- 8 files changed, 115 insertions(+), 107 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index d642115f0f67..21604d505e03 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -15,7 +15,7 @@ - name: Amazon Ads sourceDefinitionId: c6b0a29e-1da9-4512-9002-7bfd0cba2246 dockerRepository: airbyte/source-amazon-ads - dockerImageTag: 0.1.6 + dockerImageTag: 0.1.7 documentationUrl: https://docs.airbyte.io/integrations/sources/amazon-ads icon: amazonads.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index f3e3f72075ba..f1c4be36edef 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -87,9 +87,9 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amazon-ads:0.1.6" +- dockerImage: "airbyte/source-amazon-ads:0.1.7" spec: - documentationUrl: "https://docs.airbyte.io/integrations/sources/amazon-ads" + documentationUrl: "https://docs.airbyte.com/integrations/sources/amazon-ads" connectionSpecification: title: "Amazon Ads Spec" type: "object" @@ -100,72 +100,73 @@ order: 0 type: "string" client_id: - title: "Client Id" - description: "Oauth client id How to create your Login with Amazon" - name: "Client ID" + title: "Client ID" + description: "The Client ID of your Amazon Ads developer application." + order: 1 type: "string" client_secret: title: "Client Secret" - description: "Oauth client secret How to create your Login with Amazon" - name: "Client secret" + description: "The Client Secret of your Amazon Ads developer application." airbyte_secret: true + order: 2 type: "string" refresh_token: title: "Refresh Token" - description: "Oauth 2.0 refresh_token, read details here" - name: "Oauth refresh token" + description: "Amazon Ads Refresh Token. See the docs for more information on how to obtain this token." airbyte_secret: true - type: "string" - start_date: - title: "Start Date" - description: "Start date for collectiong reports, should not be more than\ - \ 60 days in past. In YYYY-MM-DD format" - name: "Start date" - examples: - - "2022-10-10" - - "2022-10-22" + order: 3 type: "string" region: - description: "An enumeration." - default: "NA" - name: "Region" - title: "AmazonAdsRegion" + title: "Region *" + description: "Region to pull data from (EU/NA/FE/SANDBOX). 
See docs for more details." enum: - "NA" - "EU" - "FE" - "SANDBOX" type: "string" - profiles: - title: "Profiles" - description: "profile Ids you want to fetch data for" - name: "Profile Ids" - type: "array" - items: - type: "integer" + default: "NA" + order: 4 report_wait_timeout: - title: "Report Wait Timeout" + title: "Report Wait Timeout *" description: "Timeout duration in minutes for Reports. Eg. 30" default: 30 - name: "Report Wait Timeout" examples: - 30 - 120 + order: 5 type: "integer" report_generation_max_retries: - title: "Report Generation Max Retries" + title: "Report Generation Maximum Retries *" description: "Maximum retries Airbyte will attempt for fetching Report Data.\ \ Eg. 5" default: 5 - name: "Report Geration Maximum Retries" examples: - 5 - 10 - 15 + order: 6 type: "integer" + start_date: + title: "Start Date (Optional)" + description: "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + examples: + - "2022-10-10" + - "2022-10-22" + order: 7 + type: "string" + profiles: + title: "Profile IDs (Optional)" + description: "Profile IDs you want to fetch data for. See docs for more details." + order: 8 + type: "array" + items: + type: "integer" required: - "client_id" - "client_secret" diff --git a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile index bb4f4e6cb7b2..c2b37d453265 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.6 +LABEL io.airbyte.version=0.1.7 LABEL io.airbyte.name=airbyte/source-amazon-ads diff --git a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json index 18ee31dc98a9..1eb16cca0e35 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json @@ -1,5 +1,5 @@ { - "documentationUrl": "https://docs.airbyte.io/integrations/sources/amazon-ads", + "documentationUrl": "https://docs.airbyte.com/integrations/sources/amazon-ads", "connectionSpecification": { "title": "Amazon Ads Spec", "type": "object", @@ -12,64 +12,64 @@ "type": "string" }, "client_id": { - "title": "Client Id", - "description": "Oauth client id How to create your Login with Amazon", - "name": "Client ID", + "title": "Client ID", + "description": "The Client ID of your Amazon Ads developer application.", + "order": 1, "type": "string" }, "client_secret": { "title": "Client Secret", - "description": "Oauth client secret How to create your Login with Amazon", - "name": "Client secret", + "description": "The Client Secret of your Amazon Ads developer application.", "airbyte_secret": true, + "order": 2, "type": "string" }, "refresh_token": { "title": "Refresh Token", - "description": "Oauth 2.0 refresh_token, read details here", - "name": "Oauth refresh token", + "description": "Amazon Ads Refresh Token. See the docs for more information on how to obtain this token.", "airbyte_secret": true, - "type": "string" - }, - "start_date": { - "title": "Start Date", - "description": "Start date for collectiong reports, should not be more than 60 days in past. 
In YYYY-MM-DD format", - "name": "Start date", - "examples": ["2022-10-10", "2022-10-22"], + "order": 3, "type": "string" }, "region": { - "description": "An enumeration.", + "title": "Region *", + "description": "Region to pull data from (EU/NA/FE/SANDBOX). See docs for more details.", "default": "NA", - "name": "Region", - "title": "AmazonAdsRegion", "enum": ["NA", "EU", "FE", "SANDBOX"], + "order": 4, "type": "string" }, - "profiles": { - "title": "Profiles", - "description": "profile Ids you want to fetch data for", - "name": "Profile Ids", - "type": "array", - "items": { - "type": "integer" - } - }, "report_wait_timeout": { - "title": "Report Wait Timeout", + "title": "Report Wait Timeout *", "description": "Timeout duration in minutes for Reports. Eg. 30", "default": 30, - "name": "Report Wait Timeout", "examples": [30, 120], + "order": 5, "type": "integer" }, "report_generation_max_retries": { - "title": "Report Generation Max Retries", + "title": "Report Generation Maximum Retries *", "description": "Maximum retries Airbyte will attempt for fetching Report Data. Eg. 5", "default": 5, - "name": "Report Geration Maximum Retries", "examples": [5, 10, 15], + "order": 6, "type": "integer" + }, + "start_date": { + "title": "Start Date (Optional)", + "description": "The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format", + "examples": ["2022-10-10", "2022-10-22"], + "order": 7, + "type": "string" + }, + "profiles": { + "title": "Profile IDs (Optional)", + "description": "Profile IDs you want to fetch data for. See docs for more details.", + "order": 8, + "type": "array", + "items": { + "type": "integer" + } } }, "required": ["client_id", "client_secret", "refresh_token"] diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py index 03da13e17907..1d836efff447 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/source.py @@ -93,7 +93,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: def spec(self, *args) -> ConnectorSpecification: return ConnectorSpecification( - documentationUrl="https://docs.airbyte.io/integrations/sources/amazon-ads", + documentationUrl="https://docs.airbyte.com/integrations/sources/amazon-ads", connectionSpecification=AmazonAdsConfig.schema(), advanced_auth=advanced_auth, ) diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py index abda177093fa..7efc1db550dc 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.py @@ -5,6 +5,7 @@ from typing import List from airbyte_cdk.models import AdvancedAuth, AuthFlowType, OAuthConfigSpecification +from airbyte_cdk.sources.utils.schema_helpers import expand_refs from pydantic import BaseModel, Extra, Field from source_amazon_ads.constants import AmazonAdsRegion @@ -21,67 +22,71 @@ class Config: auth_type: str = Field(default="oauth2.0", const=True, order=0) client_id: str = Field( - name="Client ID", - description=( - 'Oauth client id ' - "How to create your Login with Amazon" - ), + title="Client ID", + description="The Client ID of your Amazon Ads developer application.", + order=1, ) + client_secret: str = Field( - 
name="Client secret", - description=( - 'Oauth client secret ' - "How to create your Login with Amazon" - ), + title="Client Secret", + description="The Client Secret of your Amazon Ads developer application.", airbyte_secret=True, + order=2, ) refresh_token: str = Field( - name="Oauth refresh token", - description=( - 'Oauth 2.0 refresh_token, ' - "read details here" - ), + title="Refresh Token", + description='Amazon Ads Refresh Token. See the docs for more information on how to obtain this token.', airbyte_secret=True, + order=3, ) - start_date: str = Field( - None, - name="Start date", - description="Start date for collectiong reports, should not be more than 60 days in past. In YYYY-MM-DD format", - examples=["2022-10-10", "2022-10-22"], - ) - - region: AmazonAdsRegion = Field(name="Region", description="Region to pull data from (EU/NA/FE/SANDBOX)", default=AmazonAdsRegion.NA) - - profiles: List[int] = Field( - None, - name="Profile Ids", - description="profile Ids you want to fetch data for", + region: AmazonAdsRegion = Field( + title="Region *", + description='Region to pull data from (EU/NA/FE/SANDBOX). See docs for more details.', + default=AmazonAdsRegion.NA, + order=4, ) report_wait_timeout: int = Field( - name="Report Wait Timeout", + title="Report Wait Timeout *", description="Timeout duration in minutes for Reports. Eg. 30", default=30, examples=[30, 120], + order=5, ) report_generation_max_retries: int = Field( - name="Report Geration Maximum Retries", + title="Report Generation Maximum Retries *", description="Maximum retries Airbyte will attempt for fetching Report Data. Eg. 5", default=5, examples=[5, 10, 15], + order=6, + ) + + start_date: str = Field( + None, + title="Start Date (Optional)", + description="The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format", + examples=["2022-10-10", "2022-10-22"], + order=7, + ) + + profiles: List[int] = Field( + None, + title="Profile IDs (Optional)", + description='Profile IDs you want to fetch data for. 
See docs for more details.', + order=8, ) @classmethod def schema(cls, **kwargs): schema = super().schema(**kwargs) + expand_refs(schema) # Transform pydantic generated enum for region - definitions = schema.pop("definitions", None) - if definitions: - schema["properties"]["region"].update(definitions["AmazonAdsRegion"]) - schema["properties"]["region"].pop("allOf", None) + if schema["properties"]["region"].get("allOf"): + schema["properties"]["region"] = {**schema["properties"]["region"]["allOf"][0], **schema["properties"]["region"]} + schema["properties"]["region"].pop("allOf") return schema diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py index 136d3d14ce82..8da6b2037a97 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py @@ -266,7 +266,7 @@ def _get_init_report_body(self, report_date: str, record_type: str, profile): metrics_list = copy(metrics_list) metrics_list.remove("sku") -# adId is automatically added to the report by amazon and requesting adId causes an amazon error + # adId is automatically added to the report by amazon and requesting adId causes an amazon error if "adId" in metrics_list: metrics_list.remove("adId") diff --git a/docs/integrations/sources/amazon-ads.md b/docs/integrations/sources/amazon-ads.md index 766b6946fa05..65af43664364 100644 --- a/docs/integrations/sources/amazon-ads.md +++ b/docs/integrations/sources/amazon-ads.md @@ -59,10 +59,11 @@ Information about expected report generation waiting time you may find [here](ht * client\_id * client\_secret * refresh\_token -* scope -* profiles * region +* report\_wait\_timeout +* report\_generation\_max\_retries * start\_date \(optional\) +* profiles \(optional\) More how to get client\_id and client\_secret you can find on [AWS docs](https://advertising.amazon.com/API/docs/en-us/setting-up/step-1-create-lwa-app). @@ -76,6 +77,7 @@ Start date used for generating reports starting from the specified start date. 
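To make the renamed spec fields concrete, a minimal configuration for this source could look like the sketch below. It is an illustration only: every value is a placeholder rather than a real credential, and the keys simply mirror the connection specification shown in this patch.

```python
# A hypothetical example configuration for the Amazon Ads source.
# All values below are placeholders; the keys mirror the spec above.
example_config = {
    "client_id": "amzn1.application-oa2-client.0000000000000000",  # placeholder
    "client_secret": "<your-client-secret>",
    "refresh_token": "<your-refresh-token>",
    "region": "NA",  # one of: NA, EU, FE, SANDBOX
    "report_wait_timeout": 30,  # minutes to wait for report generation
    "report_generation_max_retries": 5,  # retries for fetching report data
    "start_date": "2022-10-10",  # optional; at most 60 days in the past
    "profiles": [1234567890],  # optional list of profile IDs
}
```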
S | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | +| `0.1.7` | 2022-04-27 | [\#11730](https://github.com/airbytehq/airbyte/pull/11730) | Update fields in source-connectors specifications | | `0.1.6` | 2022-04-20 | [\#11659](https://github.com/airbytehq/airbyte/pull/11659) | Add adId to products report | | `0.1.5` | 2022-04-08 | [\#11430](https://github.com/airbytehq/airbyte/pull/11430) | `Added support OAuth2.0` | | `0.1.4` | 2022-02-21 | [\#10513](https://github.com/airbytehq/airbyte/pull/10513) | `Increasing REPORT_WAIT_TIMEOUT for supporting report generation which takes longer time ` | From fd4b71e25f0cd39f7f1a2330ff5d7f3ba93020dc Mon Sep 17 00:00:00 2001 From: Baz Date: Thu, 28 Apr 2022 14:57:50 +0300 Subject: [PATCH 34/39] =?UTF-8?q?=F0=9F=90=9B=20=20Source=20Amplitude:=20a?= =?UTF-8?q?dd=20error=20descriptions=20and=20fix=20`events`=20stream=20fai?= =?UTF-8?q?l=20on=20404=20(#12430)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../resources/seed/source_definitions.yaml | 2 +- .../src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-amplitude/Dockerfile | 2 +- .../source-amplitude/source_amplitude/api.py | 71 ++++++++++++------- .../source_amplitude/errors.py | 34 +++++++++ .../source-amplitude/unit_tests/unit_test.py | 21 ++++-- docs/integrations/sources/amplitude.md | 1 + 7 files changed, 102 insertions(+), 31 deletions(-) create mode 100644 airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 21604d505e03..d9d89726f440 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -35,7 +35,7 @@ - name: Amplitude sourceDefinitionId: fa9f58c6-2d03-4237-aaa4-07d75e0c1396 dockerRepository: airbyte/source-amplitude - dockerImageTag: 0.1.4 + dockerImageTag: 0.1.5 documentationUrl: https://docs.airbyte.io/integrations/sources/amplitude icon: amplitude.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index f1c4be36edef..e18e005985dc 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -476,7 +476,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amplitude:0.1.4" +- dockerImage: "airbyte/source-amplitude:0.1.5" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/amplitude" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-amplitude/Dockerfile b/airbyte-integrations/connectors/source-amplitude/Dockerfile index 210e952009ec..f2cad066bbc6 100644 --- a/airbyte-integrations/connectors/source-amplitude/Dockerfile +++ b/airbyte-integrations/connectors/source-amplitude/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.4 +LABEL io.airbyte.version=0.1.5 LABEL io.airbyte.name=airbyte/source-amplitude diff --git a/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py b/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py index f5e3e68a3f22..0d2908608a5e 100644 --- a/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py +++ b/airbyte-integrations/connectors/source-amplitude/source_amplitude/api.py @@ -17,6 +17,8 @@ from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams.http import HttpStream +from .errors import HTTP_ERROR_CODES, error_msg_from_status + class AmplitudeStream(HttpStream, ABC): @@ -27,8 +29,12 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, return None def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - yield from response_data.get(self.name, []) + status = response.status_code + if status in HTTP_ERROR_CODES.keys(): + error_msg_from_status(status) + yield from [] + else: + yield from response.json().get(self.data_field, []) def path(self, **kwargs) -> str: return f"{self.api_version}/{self.name}" @@ -37,14 +43,12 @@ def path(self, **kwargs) -> str: class Cohorts(AmplitudeStream): primary_key = "id" api_version = 3 + data_field = "cohorts" class Annotations(AmplitudeStream): primary_key = "id" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json() - yield from response_data.get("data", []) + data_field = "data" class IncrementalAmplitudeStream(AmplitudeStream, ABC): @@ -124,6 +128,22 @@ def _parse_zip_file(self, zip_file: IO[bytes]) -> Iterable[Mapping]: for record in file: yield json.loads(record) + def stream_slices(self, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + slices = [] + start = self._start_date + if stream_state: + start = pendulum.parse(stream_state.get(self.cursor_field)) + end = pendulum.now() + while start <= end: + slices.append( + { + "start": start.strftime(self.date_template), + "end": self._get_end_date(start).strftime(self.date_template), + } + ) + start = start.add(**self.time_interval) + return slices + def read_records( self, sync_mode: SyncMode, @@ -132,34 +152,35 @@ def read_records( stream_state: Mapping[str, Any] = None, ) -> Iterable[Mapping[str, Any]]: stream_state = stream_state or {} - params = self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=None) - # API returns data only when requested with a difference between 'start' and 'end' of 6 or more hours. - if pendulum.parse(params["start"]).add(hours=6) > pendulum.parse(params["end"]): - return [] + start = pendulum.parse(stream_slice["start"]).add(hours=6) + end = pendulum.parse(stream_slice["end"]) + if start > end: + yield from [] + # sometimes the API throws a 404 error for not obvious reasons, we have to handle it and log it. 
# for example, if there is no data from the specified time period, a 404 exception is thrown # https://developers.amplitude.com/docs/export-api#status-codes + try: + self.logger.info(f"Fetching {self.name} time range: {start.strftime('%Y-%m-%d')} - {end.strftime('%Y-%m-%d')}") yield from super().read_records(sync_mode, cursor_field, stream_slice, stream_state) except requests.exceptions.HTTPError as error: - if error.response.status_code == 404: - self.logger.warn(f"Error during syncing {self.name} stream - {error}") - return [] + status = error.response.status_code + if status in HTTP_ERROR_CODES.keys(): + error_msg_from_status(status) + yield from [] else: + self.logger.error(f"Error during syncing {self.name} stream - {error}") raise - def request_params( - self, stream_state: Mapping[str, Any], next_page_token: Mapping[str, Any] = None, **kwargs - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, next_page_token=next_page_token, **kwargs) - if stream_state or next_page_token: - params["start"] = pendulum.parse(params["start"]).add(hours=1).strftime(self.date_template) + def request_params(self, stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: + params = self.base_params + params["start"] = pendulum.parse(stream_slice["start"]).strftime(self.date_template) + params["end"] = pendulum.parse(stream_slice["end"]).strftime(self.date_template) return params - def path( - self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> str: + def path(self, **kwargs) -> str: return f"{self.api_version}/export" @@ -168,9 +189,10 @@ class ActiveUsers(IncrementalAmplitudeStream): name = "active_users" primary_key = "date" time_interval = {"months": 1} + data_field = "data" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json().get("data", []) + response_data = response.json().get(self.data_field, []) if response_data: series = list(map(list, zip(*response_data["series"]))) for i, date in enumerate(response_data["xValues"]): @@ -184,9 +206,10 @@ class AverageSessionLength(IncrementalAmplitudeStream): name = "average_session_length" primary_key = "date" time_interval = {"days": 15} + data_field = "data" def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_data = response.json().get("data", []) + response_data = response.json().get(self.data_field, []) if response_data: # From the Amplitude documentation it follows that "series" is an array with one element which is itself # an array that contains the average session length for each day. diff --git a/airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py b/airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py new file mode 100644 index 000000000000..037dd7f6401b --- /dev/null +++ b/airbyte-integrations/connectors/source-amplitude/source_amplitude/errors.py @@ -0,0 +1,34 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + +import logging + +LOGGER = logging.getLogger("airbyte") + +HTTP_ERROR_CODES = { + 400: { + "msg": "The file size of the exported data is too large. Shorten the time ranges and try again. The limit size is 4GB.", + "lvl": "ERROR", + }, + 404: { + "msg": "No data collected", + "lvl": "WARN", + }, + 504: { + "msg": "The amount of data is large causing a timeout. 
For large amounts of data, the Amazon S3 destination is recommended.",
+        "lvl": "ERROR",
+    },
+}
+
+
+def error_msg_from_status(status: int = None):
+    if status:
+        level = HTTP_ERROR_CODES[status]["lvl"]
+        message = HTTP_ERROR_CODES[status]["msg"]
+        if level == "ERROR":
+            LOGGER.error(message)
+        elif level == "WARN":
+            LOGGER.warn(message)
+    else:
+        LOGGER.error(f"Unknown error occurred: code {status}")
diff --git a/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py
index 8eb316ca38e3..f691390f40ec 100755
--- a/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py
+++ b/airbyte-integrations/connectors/source-amplitude/unit_tests/unit_test.py
@@ -2,9 +2,9 @@
 # Copyright (c) 2021 Airbyte, Inc., all rights reserved.
 #

-import airbyte_cdk.models
 import pytest
 import requests
+from airbyte_cdk.models import SyncMode
 from source_amplitude.api import Events


@@ -13,16 +13,29 @@ def __init__(self, status_code):
         self.status_code = status_code


-def test_http_error_handler(mocker):
+def test_incremental_http_error_handler(mocker):
     stream = Events(start_date="2021-01-01T00:00:00Z")
+    stream_slice = stream.stream_slices()[0]
     mock_response = MockRequest(404)
     send_request_mocker = mocker.patch.object(stream, "_send_request", side_effect=requests.HTTPError(**{"response": mock_response}))
     with pytest.raises(StopIteration):
-        result = next(stream.read_records(sync_mode=airbyte_cdk.models.SyncMode.full_refresh))
+        result = next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
     assert result == []

     mock_response = MockRequest(403)
     send_request_mocker.side_effect = requests.HTTPError(**{"response": mock_response})
     with pytest.raises(requests.exceptions.HTTPError):
-        next(stream.read_records(sync_mode=airbyte_cdk.models.SyncMode.full_refresh))
+        next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
+
+    mock_response = MockRequest(400)
+    send_request_mocker.side_effect = requests.HTTPError(**{"response": mock_response})
+    with pytest.raises(StopIteration):
+        result = next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
+    assert result == []
+
+    mock_response = MockRequest(504)
+    send_request_mocker.side_effect = requests.HTTPError(**{"response": mock_response})
+    with pytest.raises(StopIteration):
+        result = next(stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice))
+    assert result == []
diff --git a/docs/integrations/sources/amplitude.md b/docs/integrations/sources/amplitude.md
index 8039e147b80e..93384fd00613 100644
--- a/docs/integrations/sources/amplitude.md
+++ b/docs/integrations/sources/amplitude.md
@@ -45,6 +45,7 @@ Please read [How to get your API key and Secret key](https://help.amplitude.com/

 | Version | Date | Pull Request | Subject |
 | :------ | :--------- | :----------------------------------------------------- | :------ |
+| 0.1.5 | 2022-04-28 | [12430](https://github.com/airbytehq/airbyte/pull/12430) | Added HTTP error descriptions and fixed `Events` stream failure caused by `404` HTTP error |
 | 0.1.4 | 2021-12-23 | [8434](https://github.com/airbytehq/airbyte/pull/8434) | Update fields in source-connectors specifications |
 | 0.1.3 | 2021-10-12 | [6375](https://github.com/airbytehq/airbyte/pull/6375) | Log Transient 404 Error in Events stream |
 | 0.1.2 | 2021-09-21 | [6353](https://github.com/airbytehq/airbyte/pull/6353) | Correct output schemas on cohorts, events, 
active\_users, and average\_session\_lengths streams | From 87beaf52603c06773def0c4ab33f8919f7562eac Mon Sep 17 00:00:00 2001 From: Denis Davydov Date: Thu, 28 Apr 2022 16:04:27 +0300 Subject: [PATCH 35/39] Source Smartsheets: fix docs to certify for Beta (#12203) * #5520 fix scrambled columns bug * #5520 source smartsheets: add changelog item * #5520 move pytest to optional setup requirements * #12003 source smartsheets: implement incremental read + tests * #12003 source smartsheet: add changelog * #12003 source smartsheets: fix merge conflict on unit tests * #12003 source smartsheets: fix startdate in spec * #11759 source smartsheets: fix doc to certify for Beta --- .../connectors/source-smartsheets/Dockerfile | 2 +- .../source_smartsheets/spec.json | 9 +- docs/integrations/sources/smartsheets.md | 115 +++++++++--------- 3 files changed, 65 insertions(+), 61 deletions(-) diff --git a/airbyte-integrations/connectors/source-smartsheets/Dockerfile b/airbyte-integrations/connectors/source-smartsheets/Dockerfile index cb26f971e9da..8a86cdfb6699 100644 --- a/airbyte-integrations/connectors/source-smartsheets/Dockerfile +++ b/airbyte-integrations/connectors/source-smartsheets/Dockerfile @@ -14,5 +14,5 @@ COPY $CODE_PATH ./$CODE_PATH ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.10 +LABEL io.airbyte.version=0.1.11 LABEL io.airbyte.name=airbyte/source-smartsheets diff --git a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json index 93c5d422ea23..5e027cac477e 100644 --- a/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json +++ b/airbyte-integrations/connectors/source-smartsheets/source_smartsheets/spec.json @@ -11,20 +11,23 @@ "title": "Access Token", "description": "The Access Token for making authenticated requests. Find in the main menu: Account > Apps & Integrations > API Access", "type": "string", + "order": 0, "airbyte_secret": true }, "spreadsheet_id": { "title": "Sheet ID", "description": "The spreadsheet ID. Find in the spreadsheet menu: File > Properties", - "type": "string" + "type": "string", + "order": 1 }, "start_datetime": { - "title": "Start Datetime", + "title": "Start Datetime (Optional)", "type": "string", "examples": ["2000-01-01T13:00:00", "2000-01-01T13:00:00-07:00"], "description": "ISO 8601, for instance: `YYYY-MM-DDTHH:MM:SS`, `YYYY-MM-DDTHH:MM:SS+HH:MM`", "format": "date-time", - "default": "2020-01-01T00:00:00+00:00" + "default": "2020-01-01T00:00:00+00:00", + "order": 2 } } }, diff --git a/docs/integrations/sources/smartsheets.md b/docs/integrations/sources/smartsheets.md index 595e348d673d..f06e8a20df8a 100644 --- a/docs/integrations/sources/smartsheets.md +++ b/docs/integrations/sources/smartsheets.md @@ -1,93 +1,94 @@ # Smartsheets -### Table of Contents +This page guides you through the process of setting up the Smartsheets source connector. 
-* [Sync Details](smartsheets.md#sync-details) - * [Column datatype mapping](smartsheets.md#column-datatype-mapping) - * [Features](smartsheets.md#Features) - * [Performance Considerations](smartsheets.md#performance-considerations) -* [Getting Started](smartsheets.md#getting-started) - * [Requirements](smartsheets.md#requirements) - * [Setup Guide](smartsheets.md#setup-guide) - * [Configuring the source in the Airbyte UI](smartsheets.md#configuring-the-source-in-the-airbyte-ui) +## Prerequisites -## Sync Details - -The Smartsheet Source is written to pull data from a single Smartsheet spreadsheet. Unlike Google Sheets, Smartsheets only allows one sheet per Smartsheet - so a given Airbyte connector instance can sync only one sheet at a time. - -To replicate multiple spreadsheets, you can create multiple instances of the Smartsheet Source in Airbyte, reusing the API token for all your sheets that you need to sync. - -**Note: Column headers must contain only alphanumeric characters or `_` , as specified in the** [**Airbyte Protocol**](../../understanding-airbyte/airbyte-specification.md). +To configure the Smartsheet Source for syncs, you'll need the following: -### Column datatype mapping +* A Smartsheets API access token - generated by a Smartsheets user with at least **read** access +* The ID of the spreadsheet you'd like to sync -The data type mapping adopted by this connector is based on the Smartsheet [documentation](https://smartsheet.redoc.ly/tag/columnsRelated#section/Column-Types). +## Step 1: Set up Smartsheets -**NOTE**: For any column datatypes interpreted by Smartsheets beside `DATE` and `DATETIME`, this connector's source schema generation assumes a `string` type, in which case the `format` field is not required by Airbyte. +### Obtain a Smartsheets API access token -| Integration Type | Airbyte Type | Airbyte Format | -|:-----------------|:-------------|:---------------------| -| `TEXT_NUMBER` | `string` | | -| `DATE` | `string` | `format: date` | -| `DATETIME` | `string` | `format: date-time` | -| `anything else` | `string` | | +You can generate an API key for your account from a session of your Smartsheet webapp by clicking: -The remaining column datatypes supported by Smartsheets are more complex types \(e.g. Predecessor, Dropdown List\) and are not supported by this connector beyond its `string` representation. +* Account (top-right icon) +* Apps & Integrations +* API Access +* Generate new access token -### Features +For questions on advanced authorization flows, refer to [this](https://www.smartsheet.com/content-center/best-practices/tips-tricks/api-getting-started). -This source connector only supports Full Refresh Sync. Since Smartsheets only allows 5000 rows per sheet, it's likely that the Full Refresh Sync Mode will suit the majority of use-cases. +### Prepare the spreadsheet ID of your Smartsheet -| Feature | Supported? | -|:------------------|:-----------| -| Full Refresh Sync | Yes | -| Incremental Sync | No | -| Namespaces | No | +You'll also need the ID of the Spreadsheet you'd like to sync. Unlike Google Sheets, this ID is not found in the URL. You can find the required spreadsheet ID from your Smartsheet app session by going to: -### Performance considerations +* File +* Properties -At the time of writing, the [Smartsheets API rate limit](https://developers.smartsheet.com/blog/smartsheet-api-best-practices#:~:text=The%20Smartsheet%20API%20currently%20imposes,per%20minute%20per%20Access%20Token.) is 300 requests per minute per API access token. 
This connector makes 6 API calls per sync operation.
+## Step 2: Set up the Smartsheets connector in Airbyte

-## Getting started
+**For Airbyte Cloud:**

-### Requirements
+1. [Log into your Airbyte Cloud](https://cloud.airbyte.io/workspaces) account.
+2. In the left navigation bar, click Sources. In the top-right corner, click **+new source**.
+3. On the Set up the source page, enter the name for the Smartsheets connector and select **Smartsheets** from the Source type dropdown.
+4. Authenticate via OAuth 2.0 using the API access token from the Prerequisites.
+5. Enter the start date and the ID of the spreadsheet you want to sync.
+6. Submit the form.

-To configure the Smartsheet Source for syncs, you'll need the following:
+**For Airbyte OSS:**
+1. Navigate to the Airbyte Open Source dashboard.
+2. Set the name for your source.
+3. Enter the API access token from the Prerequisites.
+4. Enter the ID of the spreadsheet you want to sync.
+5. Enter a start sync date.
+6. Click **Set up source**.

-* A Smartsheets API access token - generated by a Smartsheets user with at least **read** access
-* The ID of the spreadsheet you'd like to sync
+## Supported sync modes

-### Setup guide
+The Smartsheets source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes):
+ - Full Refresh | Overwrite
+ - Full Refresh | Append
+ - Incremental | Append
+ - Incremental | Deduped

-#### Obtain a Smartsheets API access token
+## Performance considerations

-You can generate an API key for your account from a session of your Smartsheet webapp by clicking:
+At the time of writing, the [Smartsheets API rate limit](https://developers.smartsheet.com/blog/smartsheet-api-best-practices#be-practical-adhere-to-rate-limiting-guidelines) is 300 requests per minute per API access token.

-* Account \(top-right icon\)
-* Apps & Integrations
-* API Access
-* Generate new access token
+## Supported streams

-For questions on advanced authorization flows, refer to [this](https://www.smartsheet.com/content-center/best-practices/tips-tricks/api-getting-started).
+This source provides a single stream per spreadsheet with a dynamic schema, depending on your spreadsheet structure.
+For example, for a spreadsheet named `Customers`, the connector would introduce a stream with the same name, with properties typed according to the data type map (see [below](https://docs.airbyte.com/integrations/sources/smartsheets/#data-type-map)).

-#### The spreadsheet ID of your Smartsheet
+## Important highlights
+The Smartsheet Source is written to pull data from a single Smartsheet spreadsheet. Unlike Google Sheets, Smartsheets only allows one sheet per Smartsheet - so a given Airbyte connector instance can sync only one sheet at a time. To replicate multiple spreadsheets, you can create multiple instances of the Smartsheet Source in Airbyte, reusing the API token for all your sheets that you need to sync.

-You'll also need the ID of the Spreadsheet you'd like to sync. Unlike Google Sheets, this ID is not found in the URL. You can find the required spreadsheet ID from your Smartsheet app session by going to:
+**Note: Column headers must contain only alphanumeric characters or `_` , as specified in the** [**Airbyte Protocol**](../../understanding-airbyte/airbyte-specification.md).

-* File
-* Properties
+## Data type map
+The data type mapping adopted by this connector is based on the Smartsheet [documentation](https://smartsheet-platform.github.io/api-docs/index.html?python#column-types). 
-### Configuring the source in the Airbyte UI
+**NOTE**: For any column datatypes interpreted by Smartsheets besides `DATE` and `DATETIME`, this connector's source schema generation assumes a `string` type, in which case the `format` field is not required by Airbyte.

-To setup your new Smartsheets source, Airbyte will need:
+| Integration Type | Airbyte Type | Airbyte Format |
+|:-----------------|:-------------|:---------------------|
+| `TEXT_NUMBER` | `string` | |
+| `DATE` | `string` | `format: date` |
+| `DATETIME` | `string` | `format: date-time` |
+| `anything else` | `string` | |

-1. Your API access token
-2. The spreadsheet ID
+The remaining column datatypes supported by Smartsheets are more complex types (e.g. Predecessor, Dropdown List) and are not supported by this connector beyond its `string` representation.

 ## Changelog

 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:---------------------------------------------------------|:----------------------------------------------------------|
+| 0.1.11 | 2022-04-27 | [12203](https://github.com/airbytehq/airbyte/pull/12203) | Doc improvements |
+| 0.1.10 | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077) | Implement incremental read and improve code test coverage |
-| 0.1.10 | 2022-04-15 | [12077](https://github.com/airbytehq/airbyte/pull/12077) | Implement incremental read and improve code test coverage |
 | 0.1.9 | 2022-04-12 | [11911](https://github.com/airbytehq/airbyte/pull/11911) | Bugfix: scrambled columns |
 | 0.1.8 | 2022-02-04 | [9792](https://github.com/airbytehq/airbyte/pull/9792) | Added oauth support |

From e45d1fafc06b51686ea616ed0882c6375522c79f Mon Sep 17 00:00:00 2001
From: Ohcui <42260310+Ohcui@users.noreply.github.com>
Date: Thu, 28 Apr 2022 21:35:19 +0800
Subject: [PATCH 36/39] OpenAPI config: remove stale required key (#12341)

---
 airbyte-api/src/main/openapi/config.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml
index 477d7ee688da..c1fdc0044d4b 100644
--- a/airbyte-api/src/main/openapi/config.yaml
+++ b/airbyte-api/src/main/openapi/config.yaml
@@ -3143,7 +3143,6 @@ components:
     WebBackendConnectionCreate:
       type: object
       required:
-        - connection
         - sourceId
         - destinationId
         - status

From 31485d7b8107c092d2d9fb75379b627b43a8599c Mon Sep 17 00:00:00 2001
From: Jagannath Saragadam
Date: Thu, 28 Apr 2022 08:14:27 -0700
Subject: [PATCH 37/39] Source Google Ads: change *_labels stream type from int to string in schema (#12158)

---
 .../resources/seed/source_definitions.yaml    |  2 +-
 .../src/main/resources/seed/source_specs.yaml |  2 +-
 .../connectors/source-google-ads/Dockerfile   |  2 +-
 .../schemas/ad_group_ad_labels.json           |  6 +-
 .../schemas/ad_group_labels.json              |  6 +-
 .../schemas/campaign_labels.json              |  4 +-
 docs/integrations/sources/google-ads.md       | 76 ++++++++++---------
 7 files changed, 50 insertions(+), 48 deletions(-)

diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index d9d89726f440..2f1f37afafa6 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -272,7 +272,7 @@
 - name: Google Ads
   sourceDefinitionId: 253487c0-2246-43ba-a21f-5116b20a2c50
   dockerRepository: airbyte/source-google-ads
-  dockerImageTag: 0.1.35
+  dockerImageTag: 0.1.36
  documentationUrl: 
https://docs.airbyte.io/integrations/sources/google-ads icon: google-adwords.svg sourceType: api diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index e18e005985dc..8830d25e2ba5 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -2644,7 +2644,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-google-ads:0.1.35" +- dockerImage: "airbyte/source-google-ads:0.1.36" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-ads" connectionSpecification: diff --git a/airbyte-integrations/connectors/source-google-ads/Dockerfile b/airbyte-integrations/connectors/source-google-ads/Dockerfile index 5557152bd28b..5d34b4e8a4a4 100644 --- a/airbyte-integrations/connectors/source-google-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-google-ads/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.35 +LABEL io.airbyte.version=0.1.36 LABEL io.airbyte.name=airbyte/source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json index 5338f22aac8a..50c0377ae578 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_ad_labels.json @@ -3,16 +3,16 @@ "type": "object", "properties": { "ad_group_ad.ad.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "ad_group_ad_label.resource_name": { "type": ["null", "string"] }, "label.name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "label.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json index 226aa6384198..ad0fb593eeeb 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/ad_group_labels.json @@ -3,16 +3,16 @@ "type": "object", "properties": { "ad_group.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "ad_group_label.resource_name": { "type": ["null", "string"] }, "label.name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "label.resource_name": { - "type": ["null", "integer"] + "type": ["null", "string"] } } } diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json index 8ecd42b3dd5e..022d767958f9 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/schemas/campaign_labels.json @@ -9,10 +9,10 @@ "type": ["null", "string"] }, "label.name": { - "type": ["null", "integer"] + "type": ["null", "string"] }, "label.resource_name": { - "type": ["null", 
"integer"] + "type": ["null", "string"] } } } diff --git a/docs/integrations/sources/google-ads.md b/docs/integrations/sources/google-ads.md index 68adfc021679..c6c2b1a38dea 100644 --- a/docs/integrations/sources/google-ads.md +++ b/docs/integrations/sources/google-ads.md @@ -8,12 +8,12 @@ If you don't already have a developer token from Google Ads, make sure you follo ## Features -| Feature | Supported? | -| :--- | :--- | -| Full Refresh Sync | Yes | -| Incremental Sync | Yes | -| Replicate Incremental Deletes | No | -| SSL connection | Yes | +| Feature | Supported? | +| :---------------------------- | :--------- | +| Full Refresh Sync | Yes | +| Incremental Sync | Yes | +| Replicate Incremental Deletes | No | +| SSL connection | Yes | ## Supported Tables @@ -21,27 +21,27 @@ This source is capable of syncing the following tables and their data: ### Main Tables -* [accounts](https://developers.google.com/google-ads/api/fields/v8/customer) -* [ad\_group\_ads](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad) -* [ad\_group\_ad\_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad_label) -* [ad\_groups](https://developers.google.com/google-ads/api/fields/v8/ad_group) -* [ad\_group\_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_label) -* [campaigns](https://developers.google.com/google-ads/api/fields/v8/campaign) -* [campaign\_labels](https://developers.google.com/google-ads/api/fields/v8/campaign_label) -* [click\_view](https://developers.google.com/google-ads/api/reference/rpc/v8/ClickView) -* [keyword](https://developers.google.com/google-ads/api/fields/v8/keyword_view) -* [geographic](https://developers.google.com/google-ads/api/fields/v8/geographic_view) +- [accounts](https://developers.google.com/google-ads/api/fields/v8/customer) +- [ad_group_ads](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad) +- [ad_group_ad_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_ad_label) +- [ad_groups](https://developers.google.com/google-ads/api/fields/v8/ad_group) +- [ad_group_labels](https://developers.google.com/google-ads/api/fields/v8/ad_group_label) +- [campaigns](https://developers.google.com/google-ads/api/fields/v8/campaign) +- [campaign_labels](https://developers.google.com/google-ads/api/fields/v8/campaign_label) +- [click_view](https://developers.google.com/google-ads/api/reference/rpc/v8/ClickView) +- [keyword](https://developers.google.com/google-ads/api/fields/v8/keyword_view) +- [geographic](https://developers.google.com/google-ads/api/fields/v8/geographic_view) Note that `ad_groups`, `ad_group_ads`, and `campaigns` contain a `labels` field, which should be joined against their respective `*_labels` streams if you want to view the actual labels. For example, the `ad_groups` stream contains an `ad_group.labels` field, which you would join against the `ad_group_labels` stream's `label.resource_name` field. 
### Report Tables

-* [account\_performance\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#account_performance)
-* [ad\_group\_ad\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#ad_performance)
-* [display\_keyword\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_keyword_performance)
-* [display\_topics\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_topics_performance)
-* [shopping\_performance\_report](https://developers.google.com/google-ads/api/docs/migration/mapping#shopping_performance)
-* [user_location_report](https://developers.google.com/google-ads/api/fields/v8/user_location_view)
+- [account_performance_report](https://developers.google.com/google-ads/api/docs/migration/mapping#account_performance)
+- [ad_group_ad_report](https://developers.google.com/google-ads/api/docs/migration/mapping#ad_performance)
+- [display_keyword_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_keyword_performance)
+- [display_topics_report](https://developers.google.com/google-ads/api/docs/migration/mapping#display_topics_performance)
+- [shopping_performance_report](https://developers.google.com/google-ads/api/docs/migration/mapping#shopping_performance)
+- [user_location_report](https://developers.google.com/google-ads/api/fields/v8/user_location_view)

 **Note**: Due to constraints from the Google Ads API, the `click_view` stream retrieves data one day at a time and can only retrieve data newer than 90 days ago.

@@ -64,13 +64,14 @@ Google Ads Account with an approved Developer Token. \(note: In order to get API access to Google Ads, you must have a "manager" account; standard accounts cannot generate a Developer Token. This manager account must be created separately from your standard account. You can find more information about this distinction in the [Google Ads docs](https://support.google.com/google-ads/answer/6139186).\)

 You'll need to find these values. See the [setup guide](#setup-guide) for instructions.
-* developer\_token
-* client\_id
-* client\_secret
-* refresh\_token
-* start\_date
-* customer\_id
-* login\_customer\_id \(you can find more information about this field in [Google Ads docs](https://developers.google.com/google-ads/api/docs/concepts/call-structure#cid)\)
+
+- developer_token
+- client_id
+- client_secret
+- refresh_token
+- start_date
+- customer_id
+- login_customer_id \(you can find more information about this field in [Google Ads docs](https://developers.google.com/google-ads/api/docs/concepts/call-structure#cid)\)

 ### Setup guide

@@ -79,7 +80,7 @@ This guide will provide information as if starting from scratch. Please skip ove
 1. Create a Google Ads Account. Here are [Google's instructions](https://support.google.com/google-ads/answer/6366720) on how to create one.
 2. Create a Google Ads MANAGER Account. Here are [Google's instructions](https://ads.google.com/home/tools/manager-accounts/) on how to create one.
 3. You should now have two Google Ads accounts: a normal account and a manager account. Link the Manager account to the normal account following [Google's documentation](https://support.google.com/google-ads/answer/7459601).
 4. Apply for a developer token \(**make sure you follow our** [**instructions**](google-ads.md#how-to-apply-for-the-developer-token)\) on your Manager account. This token allows you to access your data from the Google Ads API. Here are [Google's instructions](https://developers.google.com/google-ads/api/docs/first-call/dev-token). The docs are a little unclear on this point, but you will _not_ be able to access your data via the Google Ads API until this token is approved. You cannot use a test developer token; it has to be at least a basic developer token. It usually takes Google 24 hours to respond to these applications. This developer token is the value you will use in the `developer_token` field.
 5. Fetch your `client_id`, `client_secret`, and `refresh_token`. Google provides [instructions](https://developers.google.com/google-ads/api/docs/first-call/overview) on how to do this.
 6. Select your `customer_id`. The `customer_id` refers to the id of each of your Google Ads accounts. This is the 10-digit number in the top corner of the page when you are in Google Ads UI. The source will only pull data from the accounts for which you provide an id. If you are having trouble finding it, check out [Google's instructions](https://support.google.com/google-ads/answer/1704344).
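
As an optional sanity check before configuring the source, you can confirm that the `client_id`, `client_secret`, and `refresh_token` from steps 4 and 5 work together by exchanging the refresh token for an access token at Google's standard OAuth 2.0 token endpoint. The sketch below is a hedged illustration, not part of the connector; the placeholder values are assumptions you must fill in.

```python
# Illustrative credential check; the placeholder values are assumptions.
import requests

resp = requests.post(
    "https://oauth2.googleapis.com/token",  # Google's standard OAuth 2.0 token endpoint
    data={
        "grant_type": "refresh_token",
        "client_id": "<your client_id>",
        "client_secret": "<your client_secret>",
        "refresh_token": "<your refresh_token>",
    },
)
resp.raise_for_status()  # a 200 response means the refresh token is valid
print(resp.json()["access_token"])  # short-lived access token for the API
```
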
@@ -91,10 +92,10 @@ Google is very picky about which software and which use case can get access to a When you apply for a token, you need to mention: -* Why you need the token \(eg: want to run some internal analytics...\) -* That you will be using the Airbyte Open Source project -* That you have full access to the code base \(because we're open source\) -* That you have full access to the server running the code \(because you're self-hosting Airbyte\) +- Why you need the token \(eg: want to run some internal analytics...\) +- That you will be using the Airbyte Open Source project +- That you have full access to the code base \(because we're open source\) +- That you have full access to the server running the code \(because you're self-hosting Airbyte\) #### Understanding Google Ads Query Language @@ -106,9 +107,10 @@ This source is constrained by whatever API limits are set for the Google Ads tha ## CHANGELOG -| Version | Date | Pull Request | Subject | -|:---------|:-----------| :--- |:---------------------------------------------------------------------------------------------| -| `0.1.35` | 2022-04-18 | [9310](https://github.com/airbytehq/airbyte/pull/9310) | Add new fields to reports | +| Version | Date | Pull Request | Subject | +| :------- | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------------------- | +| `0.1.36` | 2022-04-19 | [12158](https://github.com/airbytehq/airbyte/pull/12158) | Fix `*_labels` streams data type | +| `0.1.35` | 2022-04-18 | [9310](https://github.com/airbytehq/airbyte/pull/9310) | Add new fields to reports | | `0.1.34` | 2022-03-29 | [11602](https://github.com/airbytehq/airbyte/pull/11602) | Add budget amount to campaigns stream. | | `0.1.33` | 2022-03-29 | [11513](https://github.com/airbytehq/airbyte/pull/11513) | When `end_date` is configured in the future, use today's date instead. 
|
| `0.1.32` | 2022-03-24 | [11371](https://github.com/airbytehq/airbyte/pull/11371) | Improve how connection check returns error messages |

From aab15334745b4ae8d841d5543c5fef681cb94344 Mon Sep 17 00:00:00 2001
From: Andrii Leonets <30464745+DoNotPanicUA@users.noreply.github.com>
Date: Thu, 28 Apr 2022 18:26:48 +0300
Subject: [PATCH 38/39] Pubsub, Pulsar, Redis, Redshift, Rockset destinations: Enable DAT tests (#12143)

* enable DAT tests for Pulsar

* Enable DAT tests for pubsub, redis, redshift, rockset

* format

* fix normalized data fetch

* cover "other" result type for arrays

* remove deserialization because the node is already parsed

* fix spotbugs

* fix unicode case
---
 .../PubsubDestinationAcceptanceTest.java      | 22 ++++++
 .../pulsar/PulsarRecordConsumer.java          |  3 +-
 .../PulsarDestinationAcceptanceTest.java      | 22 ++++++
 .../redis/RedisDestinationAcceptanceTest.java | 22 ++++++
 ...RedshiftCopyDestinationAcceptanceTest.java | 69 ++++++++++++++-----
 .../redshift/RedshiftTestDataComparator.java  | 57 +++++++++++++++
 .../RocksetDestinationAcceptanceTest.java     | 22 ++++++
 7 files changed, 200 insertions(+), 17 deletions(-)
 create mode 100644 airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java

diff --git a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java
index a85de5db1945..aaadb701bd9f 100644
--- a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java
+++ b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java
@@ -38,6 +38,8 @@
 import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair;
 import io.airbyte.integrations.base.JavaBaseConstants;
 import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest;
+import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator;
+import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -91,6 +93,26 @@ private AirbyteStreamNameNamespacePair fromJsonNode(final JsonNode j) {
     return new AirbyteStreamNameNamespacePair(stream, namespace);
   }

+  @Override
+  protected TestDataComparator getTestDataComparator() {
+    return new AdvancedTestDataComparator();
+  }
+
+  @Override
+  protected boolean supportBasicDataTypeTest() {
+    return true;
+  }
+
+  @Override
+  protected boolean supportArrayDataTypeTest() {
+    return true;
+  }
+
+  @Override
+  protected boolean supportObjectDataTypeTest() {
+    return true;
+  }
+
   @Override
   protected List retrieveRecords(final TestDestinationEnv testEnv,
                                  final String streamName,
diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java
index c22ac5c056c5..129b9e86348c 100644
--- 
a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java +++ b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java @@ -11,6 +11,7 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -69,7 +70,7 @@ protected void acceptTracked(final AirbyteMessage airbyteMessage) { .set(PulsarDestination.COLUMN_NAME_AB_ID, key) .set(PulsarDestination.COLUMN_NAME_STREAM, recordMessage.getStream()) .set(PulsarDestination.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()) - .set(PulsarDestination.COLUMN_NAME_DATA, recordMessage.getData().toString().getBytes()) + .set(PulsarDestination.COLUMN_NAME_DATA, recordMessage.getData().toString().getBytes(StandardCharsets.UTF_8)) .build(); sendRecord(producer, value); diff --git a/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java index 26dae59de485..f31ec96dbfa6 100644 --- a/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-pulsar/src/test-integration/java/io/airbyte/integrations/destination/pulsar/PulsarDestinationAcceptanceTest.java @@ -15,6 +15,8 @@ import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.StandardNameTransformer; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.net.InetAddress; import java.net.NetworkInterface; @@ -103,6 +105,26 @@ protected String getDefaultSchema(final JsonNode config) { return ""; } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveNormalizedRecords(final TestDestinationEnv testEnv, final String streamName, final String namespace) throws IOException { diff --git a/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java index ca80552dc3cb..9dcc2312e2fc 100644 --- a/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java +++ 
b/airbyte-integrations/connectors/destination-redis/src/test-integration/java/io/airbyte/integrations/destination/redis/RedisDestinationAcceptanceTest.java @@ -7,6 +7,8 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -68,6 +70,26 @@ protected boolean implementsNamespaces() { return true; } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveRecords(TestDestinationEnv testEnv, String streamName, diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java index f0e729a1ca5e..231252bb6b1a 100644 --- a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftCopyDestinationAcceptanceTest.java @@ -5,20 +5,24 @@ package io.airbyte.integrations.destination.redshift; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; import io.airbyte.db.Database; import io.airbyte.db.Databases; -import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.nio.file.Path; import java.sql.SQLException; -import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import org.jooq.Record; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Integration test testing {@link RedshiftCopyS3Destination}. The default Redshift integration test @@ -26,6 +30,8 @@ */ public class RedshiftCopyDestinationAcceptanceTest extends DestinationAcceptanceTest { + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftCopyDestinationAcceptanceTest.class); + // config from which to create / delete schemas. private JsonNode baseConfig; // config which refers to the schema that the test is being run in. 
@@ -34,6 +40,8 @@ public class RedshiftCopyDestinationAcceptanceTest extends DestinationAcceptance protected TestDestinationEnv testDestinationEnv; + private final ObjectMapper mapper = new ObjectMapper(); + @Override protected String getImageName() { return "airbyte/destination-redshift:dev"; @@ -55,6 +63,26 @@ protected JsonNode getFailCheckConfig() { return invalidConfig; } + @Override + protected TestDataComparator getTestDataComparator() { + return new RedshiftTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected List retrieveRecords(final TestDestinationEnv env, final String streamName, @@ -63,7 +91,7 @@ protected List retrieveRecords(final TestDestinationEnv env, throws Exception { return retrieveRecordsFromTable(namingResolver.getRawTableName(streamName), namespace) .stream() - .map(j -> Jsons.deserialize(j.get(JavaBaseConstants.COLUMN_NAME_DATA).asText())) + .map(j -> j.get(JavaBaseConstants.COLUMN_NAME_DATA)) .collect(Collectors.toList()); } @@ -93,17 +121,27 @@ protected List retrieveNormalizedRecords(final TestDestinationEnv test return retrieveRecordsFromTable(tableName, namespace); } - @Override - protected List resolveIdentifier(final String identifier) { - final List result = new ArrayList<>(); - final String resolved = namingResolver.getIdentifier(identifier); - result.add(identifier); - result.add(resolved); - if (!resolved.startsWith("\"")) { - result.add(resolved.toLowerCase()); - result.add(resolved.toUpperCase()); - } - return result; + private JsonNode getJsonFromRecord(Record record) { + ObjectNode node = mapper.createObjectNode(); + + Arrays.stream(record.fields()).forEach(field -> { + var value = record.get(field); + + switch (field.getDataType().getTypeName()) { + case "varchar", "other": + var stringValue = (value != null ? value.toString() : null); + if (stringValue != null && (stringValue.replaceAll("[^\\x00-\\x7F]", "").matches("^\\[.*\\]$") + || stringValue.replaceAll("[^\\x00-\\x7F]", "").matches("^\\{.*\\}$"))) { + node.set(field.getName(), Jsons.deserialize(stringValue)); + } else { + node.put(field.getName(), stringValue); + } + break; + default: + node.put(field.getName(), (value != null ? 
value.toString() : null)); + } + }); + return node; } private List retrieveRecordsFromTable(final String tableName, final String schemaName) throws SQLException { @@ -111,8 +149,7 @@ private List retrieveRecordsFromTable(final String tableName, final St ctx -> ctx .fetch(String.format("SELECT * FROM %s.%s ORDER BY %s ASC;", schemaName, tableName, JavaBaseConstants.COLUMN_NAME_EMITTED_AT)) .stream() - .map(r -> r.formatJSON(JdbcUtils.getDefaultJSONFormat())) - .map(Jsons::deserialize) + .map(this::getJsonFromRecord) .collect(Collectors.toList())); } diff --git a/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java new file mode 100644 index 000000000000..6b018e9cd7f4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redshift/src/test-integration/java/io/airbyte/integrations/destination/redshift/RedshiftTestDataComparator.java @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2021 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redshift; + +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import java.time.DateTimeException; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RedshiftTestDataComparator extends AdvancedTestDataComparator { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedshiftTestDataComparator.class); + + private final RedshiftSQLNameTransformer namingResolver = new RedshiftSQLNameTransformer(); + + protected static final String REDSHIFT_DATETIME_WITH_TZ_FORMAT = "yyyy-MM-dd HH:mm:ssX"; + + @Override + protected ZonedDateTime parseDestinationDateWithTz(String destinationValue) { + return ZonedDateTime.parse(destinationValue, DateTimeFormatter.ofPattern(REDSHIFT_DATETIME_WITH_TZ_FORMAT)).withZoneSameInstant(ZoneOffset.UTC); + } + + @Override + protected boolean compareDateTimeValues(String airbyteMessageValue, String destinationValue) { + try { + var format = DateTimeFormatter.ofPattern(AIRBYTE_DATETIME_FORMAT); + LocalDateTime dateTime = LocalDateTime.parse(destinationValue, DateTimeFormatter.ofPattern(REDSHIFT_DATETIME_WITH_TZ_FORMAT)); + return super.compareDateTimeValues(airbyteMessageValue, format.format(dateTime)); + } catch (DateTimeException e) { + LOGGER.warn("Fail to convert values to DateTime. Try to compare as text. Airbyte value({}), Destination value ({}). 
Exception: {}", + airbyteMessageValue, destinationValue, e); + return compareTextValues(airbyteMessageValue, destinationValue); + } + } + + @Override + protected List resolveIdentifier(final String identifier) { + final List result = new ArrayList<>(); + final String resolved = namingResolver.getIdentifier(identifier); + result.add(identifier); + result.add(resolved); + if (!resolved.startsWith("\"")) { + result.add(resolved.toLowerCase()); + result.add(resolved.toUpperCase()); + } + return result; + } + +} diff --git a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java index cfd96d02dffa..a0efe58576ac 100644 --- a/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-rockset/src/test-integration/java/io/airbyte/integrations/destination/rockset/RocksetDestinationAcceptanceTest.java @@ -18,6 +18,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; @@ -54,6 +56,26 @@ protected JsonNode getConfig() throws IOException { return Jsons.deserialize(IOs.readFile(Path.of("secrets/config.json"))); } + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + @Override protected JsonNode getFailCheckConfig() throws Exception { return Jsons.jsonNode( From e0902e6e30fa467accb8531cf33135ecd84276cd Mon Sep 17 00:00:00 2001 From: Marcos Marx Date: Thu, 28 Apr 2022 13:00:28 -0300 Subject: [PATCH 39/39] :bug: Source Hubspot: correct createAt and updateAd data type (#12424) * correct createAt and updateAd data type * bump connectorversion * bump connector version again --- .../src/main/resources/seed/source_definitions.yaml | 2 +- .../init/src/main/resources/seed/source_specs.yaml | 2 +- .../connectors/source-hubspot/Dockerfile | 2 +- .../source_hubspot/schemas/ticket_pipelines.json | 12 ++++++++---- docs/integrations/sources/hubspot.md | 3 +++ 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 2f1f37afafa6..05a792432837 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -341,7 +341,7 @@ - name: HubSpot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.1.53 + dockerImageTag: 0.1.55 documentationUrl: https://docs.airbyte.io/integrations/sources/hubspot icon: hubspot.svg sourceType: api diff 
--git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
index 8830d25e2ba5..ab3c619826b9 100644
--- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -3480,7 +3480,7 @@
   supportsNormalization: false
   supportsDBT: false
   supported_destination_sync_modes: []
-- dockerImage: "airbyte/source-hubspot:0.1.53"
+- dockerImage: "airbyte/source-hubspot:0.1.55"
  spec:
    documentationUrl: "https://docs.airbyte.io/integrations/sources/hubspot"
    connectionSpecification:
diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile
index d7b8bd20b572..504b0f5d2164 100644
--- a/airbyte-integrations/connectors/source-hubspot/Dockerfile
+++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile
@@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot
 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
 ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]

-LABEL io.airbyte.version=0.1.53
+LABEL io.airbyte.version=0.1.55
 LABEL io.airbyte.name=airbyte/source-hubspot
diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json
index b360fb642613..5d75223f3153 100644
--- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json
+++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/schemas/ticket_pipelines.json
@@ -37,10 +37,12 @@
         "type": ["null", "string"]
       },
       "createdAt": {
-        "type": ["null", "integer"]
+        "type": ["null", "string"],
+        "format": "date-time"
      },
      "updatedAt": {
-        "type": ["null", "integer"]
+        "type": ["null", "string"],
+        "format": "date-time"
      },
      "active": {
        "type": ["null", "boolean"]
      },
 }
    },
    "createdAt": {
-      "type": ["null", "integer"]
+      "type": ["null", "string"],
+      "format": "date-time"
    },
    "updatedAt": {
-      "type": ["null", "integer"]
+      "type": ["null", "string"],
+      "format": "date-time"
    }
  }
 }
diff --git a/docs/integrations/sources/hubspot.md b/docs/integrations/sources/hubspot.md
index ffabe35eca3d..353218e67d61 100644
--- a/docs/integrations/sources/hubspot.md
+++ b/docs/integrations/sources/hubspot.md
@@ -147,6 +147,9 @@ If you are using OAuth, most of the streams require the appropriate [scopes](htt

 | Version | Date | Pull Request | Subject |
 |:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------|
+| 0.1.55 | 2022-04-28 | [12424](https://github.com/airbytehq/airbyte/pull/12424) | Correct schema for ticket_pipelines stream |
+| 0.1.54 | 2022-04-28 | [12335](https://github.com/airbytehq/airbyte/pull/12335) | Mock time sleep in unit tests |
+| 0.1.53 | 2022-04-20 | [12230](https://github.com/airbytehq/airbyte/pull/12230) | Change spec from JSON to YAML format |
 | 0.1.52 | 2022-03-25 | [11423](https://github.com/airbytehq/airbyte/pull/11423) | Add tickets associations to engagements streams |
 | 0.1.51 | 2022-03-24 | [11321](https://github.com/airbytehq/airbyte/pull/11321) | Fix updated at field non exists issue |
 | 0.1.50 | 2022-03-22 | [11266](https://github.com/airbytehq/airbyte/pull/11266) | Fix Engagements Stream Pagination |