diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d523c7d254a0..66f4effe0e9c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -99,6 +99,7 @@ /cloud-media-livestream/**/* @GoogleCloudPlatform/cloud-media-team @GoogleCloudPlatform/python-samples-reviewers /bigquery-connection/**/* @GoogleCloudPlatform/api-bigquery @GoogleCloudPlatform/python-samples-reviewers /bigquery-datatransfer/**/* @GoogleCloudPlatform/api-bigquery @GoogleCloudPlatform/python-samples-reviewers +/bigquery-migration/**/* @GoogleCloudPlatform/api-bigquery @GoogleCloudPlatform/python-samples-reviewers /dlp/**/* @GoogleCloudPlatform/googleapis-dlp @GoogleCloudPlatform/python-samples-reviewers /functions/spanner/* @GoogleCloudPlatform/api-spanner-python @GoogleCloudPlatform/functions-framework-google @GoogleCloudPlatform/python-samples-reviewers /healthcare/**/* @GoogleCloudPlatform/healthcare-life-sciences @GoogleCloudPlatform/python-samples-reviewers diff --git a/bigquery-migration/snippets/__init__.py b/bigquery-migration/snippets/__init__.py new file mode 100644 index 000000000000..89aac76c1512 --- /dev/null +++ b/bigquery-migration/snippets/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/bigquery-migration/snippets/create_migration_workflow.py b/bigquery-migration/snippets/create_migration_workflow.py new file mode 100644 index 000000000000..b5ce3eb51ee7 --- /dev/null +++ b/bigquery-migration/snippets/create_migration_workflow.py @@ -0,0 +1,73 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# [START bigquery_migration_create_workflow] +def create_migration_workflow( + gcs_input_path: str, gcs_output_path: str, project_id: str +) -> None: + """Creates a migration workflow of a Batch SQL Translation and prints the response.""" + + from google.cloud import bigquery_migration_v2 + + parent = f"projects/{project_id}/locations/us" + + # Construct a BigQuery Migration client object. + client = bigquery_migration_v2.MigrationServiceClient() + + # Set the source dialect to Teradata SQL. + source_dialect = bigquery_migration_v2.Dialect() + source_dialect.teradata_dialect = bigquery_migration_v2.TeradataDialect( + mode=bigquery_migration_v2.TeradataDialect.Mode.SQL + ) + + # Set the target dialect to BigQuery dialect. + target_dialect = bigquery_migration_v2.Dialect() + target_dialect.bigquery_dialect = bigquery_migration_v2.BigQueryDialect() + + # Prepare the config proto. + translation_config = bigquery_migration_v2.TranslationConfigDetails( + gcs_source_path=gcs_input_path, + gcs_target_path=gcs_output_path, + source_dialect=source_dialect, + target_dialect=target_dialect, + ) + + # Prepare the task. 
+ migration_task = bigquery_migration_v2.MigrationTask( + type_="Translation_Teradata2BQ", translation_config_details=translation_config + ) + + # Prepare the workflow. + workflow = bigquery_migration_v2.MigrationWorkflow( + display_name="demo-workflow-python-example-Teradata2BQ" + ) + + workflow.tasks["translation-task"] = migration_task # type: ignore + + # Prepare the API request to create a migration workflow. + request = bigquery_migration_v2.CreateMigrationWorkflowRequest( + parent=parent, + migration_workflow=workflow, + ) + + response = client.create_migration_workflow(request=request) + + print("Created workflow:") + print(response.display_name) + print("Current state:") + print(response.State(response.state)) + + +# [END bigquery_migration_create_workflow] diff --git a/bigquery-migration/snippets/create_migration_workflow_test.py b/bigquery-migration/snippets/create_migration_workflow_test.py new file mode 100644 index 000000000000..d687cb649c05 --- /dev/null +++ b/bigquery-migration/snippets/create_migration_workflow_test.py @@ -0,0 +1,66 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from typing import Iterable, List, Optional + +from google.api_core.exceptions import ( + InternalServerError, + ServiceUnavailable, + TooManyRequests, +) +from google.cloud import storage +import pytest +from test_utils.retry import RetryErrors +from test_utils.system import unique_resource_id + +from . 
import create_migration_workflow + +retry_storage_errors = RetryErrors( + (TooManyRequests, InternalServerError, ServiceUnavailable) +) + +storage_client = storage.Client() +PROJECT_ID = storage_client.project + + +def _create_bucket(bucket_name: str, location: Optional[str] = None) -> storage.Bucket: + bucket = storage_client.bucket(bucket_name) + retry_storage_errors(storage_client.create_bucket)(bucket_name, location=location) + + return bucket + + +@pytest.fixture +def buckets_to_delete() -> Iterable[List]: + doomed = [] + yield doomed + for item in doomed: + if isinstance(item, storage.Bucket): + retry_storage_errors(item.delete)(force=True) + + +def test_create_migration_workflow( + capsys: pytest.CaptureFixture, buckets_to_delete: List[storage.Bucket] +) -> None: + bucket_name = "bq_migration_create_workflow_test" + unique_resource_id() + path = f"gs://{PROJECT_ID}/{bucket_name}" + bucket = _create_bucket(bucket_name) + buckets_to_delete.extend([bucket]) + + create_migration_workflow.create_migration_workflow(path, path, PROJECT_ID) + out, _ = capsys.readouterr() + + assert "demo-workflow-python-example-Teradata2BQ" in out + assert "Current state:" in out diff --git a/bigquery-migration/snippets/noxfile_config.py b/bigquery-migration/snippets/noxfile_config.py new file mode 100644 index 000000000000..c8377ecb974b --- /dev/null +++ b/bigquery-migration/snippets/noxfile_config.py @@ -0,0 +1,38 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { +    # You can opt out from the test for specific Python versions. +    "ignored_versions": ["2.7", "3.6"], +    # Old samples are opted out of enforcing Python type hints +    # All new samples should feature them +    "enforce_type_hints": True, +    # An envvar key for determining the project id to use. Change it +    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a +    # build specific Cloud project. You can also use your own string +    # to use your own Cloud project. +    # "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", +    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", +    # A dictionary you want to inject into your test. Don't put any +    # secrets here. These values will override predefined values. +    "envs": {}, +} diff --git a/bigquery-migration/snippets/requirements-test.txt b/bigquery-migration/snippets/requirements-test.txt new file mode 100644 index 000000000000..01750b6a9c9e --- /dev/null +++ b/bigquery-migration/snippets/requirements-test.txt @@ -0,0 +1,4 @@ +pytest==7.3.2 +google-cloud-testutils==1.3.3 +google-api-core==2.11.0 +google-cloud-storage==2.9.0 \ No newline at end of file diff --git a/bigquery-migration/snippets/requirements.txt b/bigquery-migration/snippets/requirements.txt new file mode 100644 index 000000000000..9eacdfaa18a3 --- /dev/null +++ b/bigquery-migration/snippets/requirements.txt @@ -0,0 +1 @@ +google-cloud-bigquery-migration==0.11.0