Skip to content

Commit

Permalink
docs(samples): add create_migration_workflow snippet (#71)
Browse files Browse the repository at this point in the history
* docs(samples): add handwritten snippet structure

* docs(samples): add create_migration_workflow snippet

* fix copyright

* remove types

* lint

* cleanup noxfile

* enforce type hints

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* Update samples/snippets/create_migration_workflow.py

Co-authored-by: Anthonios Partheniou <partheniou@google.com>

* Update samples/snippets/create_migration_workflow.py

Co-authored-by: Anthonios Partheniou <partheniou@google.com>

* pin dependencies

* add type annotations

Co-authored-by: Anthonios Partheniou <partheniou@google.com>
Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
3 people authored and leahecole committed Jul 11, 2023
1 parent 372cf0d commit cb14d90
Show file tree
Hide file tree
Showing 6 changed files with 200 additions and 0 deletions.
15 changes: 15 additions & 0 deletions bigquery-migration/snippets/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
72 changes: 72 additions & 0 deletions bigquery-migration/snippets/create_migration_workflow.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# [START bigquery_migration_create_workflow]
def create_migration_workflow(
    gcs_input_path: str, gcs_output_path: str, project_id: str, location: str = "us"
) -> None:
    """Create a Batch SQL Translation migration workflow and print the response.

    The workflow translates Teradata SQL files found under *gcs_input_path*
    into BigQuery Standard SQL, writing results under *gcs_output_path*.

    Args:
        gcs_input_path: ``gs://`` URI of the folder holding the source SQL.
        gcs_output_path: ``gs://`` URI of the folder to receive translations.
        project_id: Google Cloud project in which to create the workflow.
        location: BigQuery Migration location for the workflow. Defaults to
            ``"us"`` (the original hard-coded value), so existing callers are
            unaffected.
    """
    from google.cloud import bigquery_migration_v2

    parent = f"projects/{project_id}/locations/{location}"

    # Construct a BigQuery Migration client object.
    client = bigquery_migration_v2.MigrationServiceClient()

    # Set the source dialect to Teradata SQL.
    source_dialect = bigquery_migration_v2.Dialect()
    source_dialect.teradata_dialect = bigquery_migration_v2.TeradataDialect(
        mode=bigquery_migration_v2.TeradataDialect.Mode.SQL
    )

    # Set the target dialect to BigQuery dialect.
    target_dialect = bigquery_migration_v2.Dialect()
    target_dialect.bigquery_dialect = bigquery_migration_v2.BigQueryDialect()

    # Prepare the config proto tying the GCS paths to the dialect pair.
    translation_config = bigquery_migration_v2.TranslationConfigDetails(
        gcs_source_path=gcs_input_path,
        gcs_target_path=gcs_output_path,
        source_dialect=source_dialect,
        target_dialect=target_dialect,
    )

    # Prepare the task.
    migration_task = bigquery_migration_v2.MigrationTask(
        type_="Translation_Teradata2BQ", translation_config_details=translation_config
    )

    # Prepare the workflow.
    workflow = bigquery_migration_v2.MigrationWorkflow(
        display_name="demo-workflow-python-example-Teradata2BQ"
    )

    workflow.tasks["translation-task"] = migration_task  # type: ignore

    # Prepare the API request to create a migration workflow.
    request = bigquery_migration_v2.CreateMigrationWorkflowRequest(
        parent=parent,
        migration_workflow=workflow,
    )

    response = client.create_migration_workflow(request=request)

    print("Created workflow:")
    print(response.display_name)
    print("Current state:")
    print(response.State(response.state))


# [END bigquery_migration_create_workflow]
69 changes: 69 additions & 0 deletions bigquery-migration/snippets/create_migration_workflow_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from typing import Iterable, List, Optional

from google.api_core.exceptions import (
InternalServerError,
ServiceUnavailable,
TooManyRequests,
)

from google.cloud import storage

import pytest

from test_utils.retry import RetryErrors
from test_utils.system import unique_resource_id

from . import create_migration_workflow

retry_storage_errors = RetryErrors(
(TooManyRequests, InternalServerError, ServiceUnavailable)
)

storage_client = storage.Client()
PROJECT_ID = storage_client.project


def _create_bucket(bucket_name: str, location: Optional[str] = None) -> storage.Bucket:
    """Create a GCS bucket, retrying transient server errors, and return a handle to it."""
    create = retry_storage_errors(storage_client.create_bucket)
    create(bucket_name, location=location)
    return storage_client.bucket(bucket_name)


@pytest.fixture
def buckets_to_delete() -> Iterable[List]:
    """Yield a list the test can fill with buckets; delete them on teardown."""
    created: List = []
    yield created
    for bucket in created:
        if isinstance(bucket, storage.Bucket):
            # force=True removes any objects left in the bucket first.
            retry_storage_errors(bucket.delete)(force=True)


def test_create_migration_workflow(
    capsys: pytest.CaptureFixture, buckets_to_delete: List[storage.Bucket]
) -> None:
    """Smoke-test the snippet: it should print the workflow name and its state."""
    bucket_name = "bq_migration_create_workflow_test" + unique_resource_id()
    # Point the workflow at the bucket created below. The original built
    # "gs://{PROJECT_ID}/{bucket_name}", which names a bucket called
    # PROJECT_ID that this test never creates.
    path = f"gs://{bucket_name}"
    bucket = _create_bucket(bucket_name)
    buckets_to_delete.append(bucket)

    create_migration_workflow.create_migration_workflow(path, path, PROJECT_ID)
    out, _ = capsys.readouterr()

    assert "demo-workflow-python-example-Teradata2BQ" in out
    assert "Current state:" in out
38 changes: 38 additions & 0 deletions bigquery-migration/snippets/noxfile_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Default TEST_CONFIG_OVERRIDE for python repos.

# You can copy this file into your directory, then it will be imported from
# the noxfile.py.

# The source of truth:
# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py

TEST_CONFIG_OVERRIDE = {
    # You can opt out from the test for specific Python versions.
    "ignored_versions": [],
    # Old samples are opted out of enforcing Python type hints.
    # All new samples should feature them; these snippets are new.
    "enforce_type_hints": True,
    # An envvar key for determining the project id to use. Change it
    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
    # build specific Cloud project. You can also use your own string
    # to use your own Cloud project.
    # "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
    # A dictionary you want to inject into your test. Don't put any
    # secrets here. These values will override predefined values.
    "envs": {},
}
4 changes: 4 additions & 0 deletions bigquery-migration/snippets/requirements-test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
pytest==6.2.5
google-cloud-testutils==1.3.0
google-api-core==2.8.0
google-cloud-storage==2.0.0
2 changes: 2 additions & 0 deletions bigquery-migration/snippets/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
google-cloud-bigquery-migration==0.4.1
protobuf==3.19.1

0 comments on commit cb14d90

Please sign in to comment.