From 19a3fe322136285449e55788e07423ff7f44c1c5 Mon Sep 17 00:00:00 2001 From: tetiana-karasova <62887365+tetiana-karasova@users.noreply.github.com> Date: Tue, 15 Feb 2022 02:42:28 +0100 Subject: [PATCH] docs(samples): add product import samples (#149) * feat: product_number variable is replaced with product_id Import product code samples are added * revert changes in README * fix typo in TEST_RESOURCES_SETUP_CLEANUP.md * include project number in import_products_big_query_table.py * use project_number in import_products_gcs.py; use f-string * use project_number in import_products_inline_source.py * use project_number in product/setup/setup_cleanup.py * use project_number in search_simple_query.py * use project_number in search_with_boost_spec.py * use project_number in search_with_filtering.py * use project number in search_with_ordering.py * use project_number in search_with_pagination.py * use project_number in search_with_query_expansion_spec.py * use project_number in test_resources_recovery * use project_number in search_with_facet_spec.py * fix typo in f-string * fix typos * use correct path to resources * lint * revert change to paths * resolve error where bq table doesn't exist * make setup more robust * lint * import product from inline source is fixed * use google.cloud.bigquery client * update test to reflect changes * lint Co-authored-by: Karl Weinmeister <11586922+kweinmeister@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../interactive-tutorials/README.md | 6 +- .../TEST_RESOURCES_SETUP_CLEANUP.md | 2 +- .../import_products_big_query_table.py | 101 +++++++ .../product/import_products_bq_test.py | 31 ++ .../product/import_products_gcs.py | 111 +++++++ .../product/import_products_gcs_test.py | 30 ++ .../product/import_products_inline_source.py | 149 ++++++++++ .../product/import_products_inline_test.py | 31 ++ .../interactive-tutorials/product/noxfile.py | 279 ++++++++++++++++++ .../product/noxfile_config.py | 32 ++ .../product/requirements-test.txt | 2 + .../product/requirements.txt | 4 + .../setup/products_create_bigquery_table.py | 31 ++ .../products_create_bigquery_table_test.py | 45 +++ .../setup/products_create_gcs_bucket.py | 28 ++ .../setup/products_create_gcs_bucket_test.py | 40 +++ .../setup/products_delete_gcs_bucket.py | 25 ++ .../product/setup/setup_cleanup.py | 208 +++++++++++++ .../create_test_resources.py | 30 +- .../remove_test_resources.py | 5 +- 20 files changed, 1167 insertions(+), 23 deletions(-) create mode 100644 generated_samples/interactive-tutorials/product/import_products_big_query_table.py create mode 100644 generated_samples/interactive-tutorials/product/import_products_bq_test.py create mode 100644 generated_samples/interactive-tutorials/product/import_products_gcs.py create mode 100644 generated_samples/interactive-tutorials/product/import_products_gcs_test.py create mode 100644 generated_samples/interactive-tutorials/product/import_products_inline_source.py create mode 100644 generated_samples/interactive-tutorials/product/import_products_inline_test.py create mode 100644 generated_samples/interactive-tutorials/product/noxfile.py create mode 100644 generated_samples/interactive-tutorials/product/noxfile_config.py create mode 100644 generated_samples/interactive-tutorials/product/requirements-test.txt create mode 100644 generated_samples/interactive-tutorials/product/requirements.txt create mode 100644 generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table.py create mode 100644 
generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table_test.py create mode 100644 generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket.py create mode 100644 generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket_test.py create mode 100644 generated_samples/interactive-tutorials/product/setup/products_delete_gcs_bucket.py create mode 100644 generated_samples/interactive-tutorials/product/setup/setup_cleanup.py diff --git a/generated_samples/interactive-tutorials/README.md b/generated_samples/interactive-tutorials/README.md index 17593f8ab0b3..ca7204f71298 100644 --- a/generated_samples/interactive-tutorials/README.md +++ b/generated_samples/interactive-tutorials/README.md @@ -72,7 +72,7 @@ To run a code sample from the Cloud Shell, you need to be authenticated using th export GOOGLE_APPLICATION_CREDENTIALS=~/key.json ``` -### Set the PROJECT_NUMBER and PROJECT_ID environment variables +### Set the GOOGLE_CLOUD_PROJECT_NUMBER and GOOGLE_CLOUD_PROJECT environment variables You will run the code samples in your own Google Cloud project. To use the **project_number** and **project_id** in every request to the Retail API, you should first specify them as environment variables. @@ -80,11 +80,11 @@ You will run the code samples in your own Google Cloud project. To use the **pro 1. Set the **project_number** with the following command: ```bash - export PROJECT_NUMBER= + export GOOGLE_CLOUD_PROJECT_NUMBER= ``` 1. Set the **project_id** with the following command: ```bash - export PROJECT_ID= + export GOOGLE_CLOUD_PROJECT= ``` ### Install Google Cloud Retail libraries diff --git a/generated_samples/interactive-tutorials/TEST_RESOURCES_SETUP_CLEANUP.md b/generated_samples/interactive-tutorials/TEST_RESOURCES_SETUP_CLEANUP.md index f7989d003e60..89b1cf130731 100644 --- a/generated_samples/interactive-tutorials/TEST_RESOURCES_SETUP_CLEANUP.md +++ b/generated_samples/interactive-tutorials/TEST_RESOURCES_SETUP_CLEANUP.md @@ -4,7 +4,7 @@ To successfully import the catalog data for tests, the following environment variables should be set: - GOOGLE_CLOUD_PROJECT_NUMBER - - GOOGLE_CLOUD_PROJECT_ID + - GOOGLE_CLOUD_PROJECT - BUCKET_NAME - EVENTS_BUCKET_NAME These values are stored in the Secret Manager and will be submitted as diff --git a/generated_samples/interactive-tutorials/product/import_products_big_query_table.py b/generated_samples/interactive-tutorials/product/import_products_big_query_table.py new file mode 100644 index 000000000000..39b0d6f6b03c --- /dev/null +++ b/generated_samples/interactive-tutorials/product/import_products_big_query_table.py @@ -0,0 +1,101 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
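+
+# NOTE: this sample reads the GOOGLE_CLOUD_PROJECT_NUMBER and
+# GOOGLE_CLOUD_PROJECT environment variables, and it expects the BigQuery
+# dataset and table "products.products" to already exist; they can be created
+# with setup/products_create_bigquery_table.py.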
+ +# [START retail_import_products_from_big_query] +# Import products into a catalog from big query table using Retail API +# +import os +import time + +from google.cloud.retail import ( + BigQuerySource, + ImportProductsRequest, + ProductInputConfig, + ProductServiceClient, +) + +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + +default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch" +dataset_id = "products" +table_id = "products" + + +# TO CHECK ERROR HANDLING USE THE TABLE WITH INVALID PRODUCTS: +# table_id = "products_some_invalid" + + +# get import products from big query request +def get_import_products_big_query_request(reconciliation_mode): + # TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE: + # default_catalog = "invalid_catalog_name" + big_query_source = BigQuerySource() + big_query_source.project_id = project_id + big_query_source.dataset_id = dataset_id + big_query_source.table_id = table_id + big_query_source.data_schema = "product" + + input_config = ProductInputConfig() + input_config.big_query_source = big_query_source + + import_request = ImportProductsRequest() + import_request.parent = default_catalog + import_request.reconciliation_mode = reconciliation_mode + import_request.input_config = input_config + + print("---import products from big query table request---") + print(import_request) + + return import_request + + +# call the Retail API to import products +def import_products_from_big_query(): + # TRY THE FULL RECONCILIATION MODE HERE: + reconciliation_mode = ImportProductsRequest.ReconciliationMode.INCREMENTAL + + import_big_query_request = get_import_products_big_query_request( + reconciliation_mode + ) + big_query_operation = ProductServiceClient().import_products( + import_big_query_request + ) + + print("---the operation was started:----") + print(big_query_operation.operation.name) + + while not big_query_operation.done(): + print("---please wait till operation is done---") + time.sleep(30) + print("---import products operation is done---") + + if big_query_operation.metadata is not None: + print("---number of successfully imported products---") + print(big_query_operation.metadata.success_count) + print("---number of failures during the importing---") + print(big_query_operation.metadata.failure_count) + else: + print("---operation.metadata is empty---") + + if big_query_operation.result is not None: + print("---operation result:---") + print(big_query_operation.result()) + else: + print("---operation.result is empty---") + + +import_products_from_big_query() + +# [END retail_import_products_from_big_query] diff --git a/generated_samples/interactive-tutorials/product/import_products_bq_test.py b/generated_samples/interactive-tutorials/product/import_products_bq_test.py new file mode 100644 index 000000000000..b743ae722ed3 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/import_products_bq_test.py @@ -0,0 +1,31 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import re +import subprocess + + +def test_import_products_bq(): + output = str( + subprocess.check_output("python import_products_big_query_table.py", shell=True) + ) + + assert re.match(".*import products from big query table request.*", output) + assert re.match(".*the operation was started.*", output) + assert re.match( + ".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*", + output, + ) + + assert re.match(".*number of successfully imported products.*316.*", output) diff --git a/generated_samples/interactive-tutorials/product/import_products_gcs.py b/generated_samples/interactive-tutorials/product/import_products_gcs.py new file mode 100644 index 000000000000..1a8656042d09 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/import_products_gcs.py @@ -0,0 +1,111 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START retail_import_products_from_gcs] +# Import products into a catalog from gcs using Retail API +# +import os +import time + +from google.cloud.retail import ( + GcsSource, + ImportErrorsConfig, + ImportProductsRequest, + ProductInputConfig, + ProductServiceClient, +) + + +# Read the project number from the environment variable +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +bucket_name = os.environ["BUCKET_NAME"] + +# You can change the branch here. 
+# The "default_branch" is set to point to the branch "0".
+default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
+
+gcs_bucket = f"gs://{bucket_name}"
+gcs_errors_bucket = f"{gcs_bucket}/error"
+gcs_products_object = "products.json"
+
+
+# TO CHECK ERROR HANDLING USE THE JSON WITH INVALID PRODUCT
+# gcs_products_object = "products_some_invalid.json"
+
+
+# get import products from GCS request
+def get_import_products_gcs_request(gcs_object_name: str):
+    # TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE:
+    # default_catalog = "invalid_catalog_name"
+    gcs_source = GcsSource()
+    gcs_source.input_uris = [f"{gcs_bucket}/{gcs_object_name}"]
+
+    input_config = ProductInputConfig()
+    input_config.gcs_source = gcs_source
+    print("GCS source:")
+    print(gcs_source.input_uris)
+
+    errors_config = ImportErrorsConfig()
+    errors_config.gcs_prefix = gcs_errors_bucket
+
+    import_request = ImportProductsRequest()
+    import_request.parent = default_catalog
+    import_request.reconciliation_mode = (
+        ImportProductsRequest.ReconciliationMode.INCREMENTAL
+    )
+    import_request.input_config = input_config
+    import_request.errors_config = errors_config
+
+    print("---import products from google cloud source request---")
+    print(import_request)
+
+    return import_request
+
+
+# call the Retail API to import products
+def import_products_from_gcs():
+    import_gcs_request = get_import_products_gcs_request(gcs_products_object)
+    gcs_operation = ProductServiceClient().import_products(import_gcs_request)
+
+    print("---the operation was started:----")
+    print(gcs_operation.operation.name)
+
+    while not gcs_operation.done():
+        print("---please wait till operation is done---")
+        time.sleep(30)
+    print("---import products operation is done---")
+
+    if gcs_operation.metadata is not None:
+        print("---number of successfully imported products---")
+        print(gcs_operation.metadata.success_count)
+        print("---number of failures during the importing---")
+        print(gcs_operation.metadata.failure_count)
+    else:
+        print("---operation.metadata is empty---")
+
+    if gcs_operation.result is not None:
+        print("---operation result:---")
+        print(gcs_operation.result())
+    else:
+        print("---operation.result is empty---")
+
+    # The imported products need to be indexed in the catalog before they become available for search.
+    print(
+        "Wait 2-5 minutes until the products are indexed in the catalog; after that they become available for search"
+    )
+
+
+import_products_from_gcs()
+
+# [END retail_import_products_from_gcs]
diff --git a/generated_samples/interactive-tutorials/product/import_products_gcs_test.py b/generated_samples/interactive-tutorials/product/import_products_gcs_test.py
new file mode 100644
index 000000000000..f8ec41496ec6
--- /dev/null
+++ b/generated_samples/interactive-tutorials/product/import_products_gcs_test.py
@@ -0,0 +1,30 @@
+# Copyright 2021 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
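+
+# NOTE: the test below shells out to import_products_gcs.py and asserts on
+# its printed output. It assumes that products.json has already been uploaded
+# to the Cloud Storage bucket referenced by the BUCKET_NAME environment
+# variable (for example with setup/products_create_gcs_bucket.py).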
+ +import re +import subprocess + + +def test_import_products_gcs(): + output = str(subprocess.check_output("python import_products_gcs.py", shell=True)) + + assert re.match(".*import products from google cloud source request.*", output) + assert re.match('.*input_uris: "gs://.*/products.json".*', output) + assert re.match(".*the operation was started.*", output) + assert re.match( + ".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*", + output, + ) + + assert re.match(".*number of successfully imported products.*316.*", output) diff --git a/generated_samples/interactive-tutorials/product/import_products_inline_source.py b/generated_samples/interactive-tutorials/product/import_products_inline_source.py new file mode 100644 index 000000000000..23e899302b1f --- /dev/null +++ b/generated_samples/interactive-tutorials/product/import_products_inline_source.py @@ -0,0 +1,149 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START retail_import_products_from_inline_source] +# Import products into a catalog from inline source using Retail API +# +import os +import random +import string +import time + +from google.cloud.retail import ( + ColorInfo, + FulfillmentInfo, + ImportProductsRequest, + PriceInfo, + Product, + ProductInlineSource, + ProductInputConfig, + ProductServiceClient, +) +from google.protobuf.field_mask_pb2 import FieldMask + +# Read the project number from the environment variable +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] + +default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch" + + +# prepare product to import as inline source +def get_products(): + products = [] + product1 = Product() + product2 = Product() + + price_info1 = PriceInfo() + price_info1.price = 16.0 + price_info1.original_price = 45.0 + price_info1.cost = 12.0 + price_info1.currency_code = "USD" + + color_info1 = ColorInfo() + color_info1.color_families = ["Blue"] + color_info1.colors = ["Light blue", "Blue", "Dark blue"] + + fulfillment_info1 = FulfillmentInfo() + fulfillment_info1.type_ = "pickup-in-store" + fulfillment_info1.place_ids = ["store1", "store2"] + + field_mask1 = FieldMask(paths=["title", "categories", "price_info", "color_info"]) + + # TO CHECK ERROR HANDLING COMMENT OUT THE PRODUCT TITLE HERE: + product1.title = "#IamRemarkable Pen" + product1.id = "".join(random.sample(string.ascii_lowercase, 8)) + product1.categories = ["Office"] + product1.uri = "https://shop.googlemerchandisestore.com/Google+Redesign/Office/IamRemarkable+Pen" + product1.brands = ["#IamRemarkable"] + product1.price_info = price_info1 + product1.color_info = color_info1 + product1.fulfillment_info = [fulfillment_info1] + product1.retrievable_fields = field_mask1 + + price_info2 = PriceInfo() + price_info2.price = 35.0 + price_info2.original_price = 45.0 + price_info2.cost = 12.0 + price_info2.currency_code = "USD" + + color_info2 = ColorInfo() + 
color_info2.color_families = ["Blue"] + color_info2.colors = ["Sky blue"] + + fulfillment_info2 = FulfillmentInfo() + fulfillment_info2.type_ = "pickup-in-store" + fulfillment_info2.place_ids = ["store2", "store3"] + + field_mask2 = FieldMask(paths=["title", "categories", "price_info", "color_info"]) + + product2.title = "Android Embroidered Crewneck Sweater" + product2.id = "".join(random.sample(string.ascii_lowercase, 8)) + product2.categories = ["Apparel"] + product2.uri = "https://shop.googlemerchandisestore.com/Google+Redesign/Apparel/Android+Embroidered+Crewneck+Sweater" + product2.brands = ["Android"] + product2.price_info = price_info2 + product2.color_info = color_info2 + product2.fulfillment_info = [fulfillment_info2] + product2.retrievable_fields = field_mask2 + + products.append(product1) + products.append(product2) + return products + + +# get import products from inline source request +def get_import_products_inline_request(products_to_import): + # TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE: + # default_catalog = "invalid_catalog_name" + inline_source = ProductInlineSource() + inline_source.products = products_to_import + + input_config = ProductInputConfig() + input_config.product_inline_source = inline_source + + import_request = ImportProductsRequest() + import_request.parent = default_catalog + import_request.input_config = input_config + + print("---import products from inline source request---") + print(import_request) + + return import_request + + +# call the Retail API to import products +def import_products_from_inline_source(): + import_request = get_import_products_inline_request(get_products()) + import_operation = ProductServiceClient().import_products(import_request) + + print("---the operation was started:----") + print(import_operation.operation.name) + + while not import_operation.done(): + print("---please wait till operation is done---") + time.sleep(5) + print("---import products operation is done---") + + if import_operation.metadata is not None: + print("---number of successfully imported products---") + print(import_operation.metadata.success_count) + print("---number of failures during the importing---") + print(import_operation.metadata.failure_count) + else: + print("---operation.metadata is empty---") + + +import_products_from_inline_source() + +# [END retail_import_products_from_inline_source] diff --git a/generated_samples/interactive-tutorials/product/import_products_inline_test.py b/generated_samples/interactive-tutorials/product/import_products_inline_test.py new file mode 100644 index 000000000000..388cd5b603b9 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/import_products_inline_test.py @@ -0,0 +1,31 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
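+
+# NOTE: the test below runs import_products_inline_source.py in a subprocess
+# and asserts on its printed output; the two inline products are generated in
+# get_products() with random IDs, so no pre-created resources are required.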
+
+import re
+import subprocess
+
+
+def test_import_products_inline():
+    output = str(
+        subprocess.check_output("python import_products_inline_source.py", shell=True)
+    )
+
+    assert re.match(".*import products from inline source request.*", output)
+    assert re.match(".*the operation was started.*", output)
+    assert re.match(
+        ".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
+        output,
+    )
+
+    assert re.match(".*number of successfully imported products.*2.*", output)
diff --git a/generated_samples/interactive-tutorials/product/noxfile.py b/generated_samples/interactive-tutorials/product/noxfile.py
new file mode 100644
index 000000000000..20cdfc620138
--- /dev/null
+++ b/generated_samples/interactive-tutorials/product/noxfile.py
@@ -0,0 +1,279 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import glob
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, List, Optional
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+BLACK_VERSION = "black==19.10b0"
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+    # You can opt out from the test for specific Python versions.
+    "ignored_versions": [],
+    # Old samples are opted out of enforcing Python type hints
+    # All new samples should feature them
+    "enforce_type_hints": False,
+    # An envvar key for determining the project id to use. Change it
+    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
+    # build specific Cloud project. You can also use your own string
+    # to use your own Cloud project.
+    "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
+    # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
+    # If you need to use a specific version of pip,
+    # change pip_version_override to the string representation
+    # of the version number, for example, "20.2.4"
+    "pip_version_override": None,
+    # A dictionary you want to inject into your test. Don't put any
+    # secrets here. These values will override predefined values.
+    "envs": {},
+}
+
+
+try:
+    # Ensure we can import noxfile_config in the project's directory.
+    sys.path.append(".")
+    from noxfile_config import TEST_CONFIG_OVERRIDE
+except ImportError as e:
+    print("No user noxfile_config found: detail: {}".format(e))
+    TEST_CONFIG_OVERRIDE = {}
+
+# Update the TEST_CONFIG with the user supplied values.
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", 
"requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/generated_samples/interactive-tutorials/product/noxfile_config.py b/generated_samples/interactive-tutorials/product/noxfile_config.py new file mode 100644 index 000000000000..eeab78b4df11 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/noxfile_config.py @@ -0,0 +1,32 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.6"], + # An envvar key for determining the project id to use. 
Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {"BUCKET_NAME": "retail-interactive-tutorials"}, +} diff --git a/generated_samples/interactive-tutorials/product/requirements-test.txt b/generated_samples/interactive-tutorials/product/requirements-test.txt new file mode 100644 index 000000000000..bbf73145f7ee --- /dev/null +++ b/generated_samples/interactive-tutorials/product/requirements-test.txt @@ -0,0 +1,2 @@ +pytest==6.2.5 +pytest-xdist==2.5.0 diff --git a/generated_samples/interactive-tutorials/product/requirements.txt b/generated_samples/interactive-tutorials/product/requirements.txt new file mode 100644 index 000000000000..0ba6ea712f50 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/requirements.txt @@ -0,0 +1,4 @@ +google==3.0.0 +google-cloud-retail==1.1.0 +google-cloud-storage==1.43.0 +google-cloud-bigquery==2.30.1 \ No newline at end of file diff --git a/generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table.py b/generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table.py new file mode 100644 index 000000000000..1ee25c5622d1 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table.py @@ -0,0 +1,31 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from setup_cleanup import create_bq_dataset, create_bq_table, \ + upload_data_to_bq_table + +dataset = "products" +valid_products_table = "products" +invalid_products_table = "products_some_invalid" +product_schema = "../resources/product_schema.json" +valid_products_source_file = "../resources/products.json" +invalid_products_source_file = "../resources/products_some_invalid.json" + +create_bq_dataset(dataset) +create_bq_table(dataset, valid_products_table, product_schema) +upload_data_to_bq_table(dataset, valid_products_table, + valid_products_source_file, product_schema) +create_bq_table(dataset, invalid_products_table, product_schema) +upload_data_to_bq_table(dataset, invalid_products_table, + invalid_products_source_file, product_schema) diff --git a/generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table_test.py b/generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table_test.py new file mode 100644 index 000000000000..e6eee35350b3 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table_test.py @@ -0,0 +1,45 @@ +# Copyright 2021 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import re +import subprocess + +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] + + +def test_create_bigquery_table(): + output = str( + subprocess.check_output( + 'python setup/products_create_bigquery_table.py', + shell=True)) + assert re.match( + f'.*Creating dataset {project_id}.products.*', output) + assert re.match( + f'(.*dataset {project_id}.products already exists.*|.*dataset is created.*)', output) + assert re.match( + f'.*Creating BigQuery table {project_id}.products.products.*', output) + assert re.match( + f'(.*table {project_id}.products.products already exists.*|.*table is created.*)', output) + assert re.match( + f'.*Uploading data from ../resources/products.json to the table {project_id}.products.products.*', output) + assert re.match( + f'.*Creating BigQuery table {project_id}.products.products_some_invalid.*', + output) + assert re.match( + f'(.*table {project_id}.products.products_some_invalid already exists.*|.*table is created.*)', + output) + assert re.match( + f'.*Uploading data from ../resources/products_some_invalid.json to the table {project_id}.products.products_some_invalid.*', + output) diff --git a/generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket.py b/generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket.py new file mode 100644 index 000000000000..92be7aa1be62 --- /dev/null +++ b/generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket.py @@ -0,0 +1,28 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os + +from setup_cleanup import create_bucket, upload_blob + +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +timestamp_ = datetime.datetime.now().timestamp().__round__() +bucket_name = "{}_products_{}".format(project_id, timestamp_) + +create_bucket(bucket_name) +upload_blob(bucket_name, "../resources/products.json") +upload_blob(bucket_name, "../resources/products_some_invalid.json") + +print("\nThe gcs bucket {} was created".format(bucket_name)) diff --git a/generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket_test.py b/generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket_test.py new file mode 100644 index 000000000000..2050974518ae --- /dev/null +++ b/generated_samples/interactive-tutorials/product/setup/products_create_gcs_bucket_test.py @@ -0,0 +1,40 @@ +# Copyright 2021 Google Inc. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re +import subprocess + +from products_delete_gcs_bucket import delete_bucket_by_name + + +def test_create_gcs_bucket(): + output = str( + subprocess.check_output( + 'python setup/products_create_gcs_bucket.py', + shell=True)) + + bucket_name = re.search('The gcs bucket (.+?) was created', output).group(1) + delete_bucket_by_name(bucket_name) + + print("bucket_name = {}".format(bucket_name)) + + assert re.match( + '.*Creating new bucket.*', output) + assert re.match( + '(.*The gcs bucket.*?was created.*|.*Bucket.*?already exists.*)', output) + assert re.match( + '.*Uploading data from ../resources/products.json to the bucket.*', output) + assert re.match( + '.*Uploading data from ../resources/products_some_invalid.json to the bucket.*', + output) diff --git a/generated_samples/interactive-tutorials/product/setup/products_delete_gcs_bucket.py b/generated_samples/interactive-tutorials/product/setup/products_delete_gcs_bucket.py new file mode 100644 index 000000000000..010478cf7dea --- /dev/null +++ b/generated_samples/interactive-tutorials/product/setup/products_delete_gcs_bucket.py @@ -0,0 +1,25 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from setup_cleanup import delete_bucket + + +def delete_bucket_by_name(name: str): + if name is None: + bucket_name = os.environ["BUCKET_NAME"] + delete_bucket(bucket_name) + else: + delete_bucket(name) diff --git a/generated_samples/interactive-tutorials/product/setup/setup_cleanup.py b/generated_samples/interactive-tutorials/product/setup/setup_cleanup.py new file mode 100644 index 000000000000..11027b9cc6ca --- /dev/null +++ b/generated_samples/interactive-tutorials/product/setup/setup_cleanup.py @@ -0,0 +1,208 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
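+
+# Shared helpers for the setup scripts and tests in this directory: create and
+# delete Cloud Storage buckets, create BigQuery datasets and tables, load data
+# into them, and create, fetch and delete test products in the Retail catalog.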
+
+import json
+import os
+import re
+
+from google.api_core.exceptions import NotFound
+
+from google.cloud import bigquery
+from google.cloud import storage
+from google.cloud.retail_v2 import CreateProductRequest, DeleteProductRequest, \
+    FulfillmentInfo, GetProductRequest, PriceInfo, Product, ProductServiceClient
+
+project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
+project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
+default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog"
+default_branch_name = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
+
+
+def generate_product() -> Product:
+    price_info = PriceInfo()
+    price_info.price = 30.0
+    price_info.original_price = 35.5
+    price_info.currency_code = "USD"
+    fulfillment_info = FulfillmentInfo()
+    fulfillment_info.type_ = "pickup-in-store"
+    fulfillment_info.place_ids = ["store0", "store1"]
+    return Product(
+        title='Nest Mini',
+        type_=Product.Type.PRIMARY,
+        categories=['Speakers and displays'],
+        brands=['Google'],
+        price_info=price_info,
+        fulfillment_info=[fulfillment_info],
+        availability="IN_STOCK",
+    )
+
+
+def create_product(product_id: str) -> object:
+    create_product_request = CreateProductRequest()
+    create_product_request.product = generate_product()
+    create_product_request.product_id = product_id
+    create_product_request.parent = default_branch_name
+
+    created_product = ProductServiceClient().create_product(
+        create_product_request)
+    print("---product is created:---")
+    print(created_product)
+
+    return created_product
+
+
+def delete_product(product_name: str):
+    delete_product_request = DeleteProductRequest()
+    delete_product_request.name = product_name
+    ProductServiceClient().delete_product(delete_product_request)
+
+    print("---product " + product_name + " was deleted:---")
+
+
+def get_product(product_name: str):
+    get_product_request = GetProductRequest()
+    get_product_request.name = product_name
+    try:
+        product = ProductServiceClient().get_product(get_product_request)
+        print("---get product response:---")
+        print(product)
+        return product
+    except NotFound as e:
+        print(e.message)
+        return e.message
+
+
+def try_to_delete_product_if_exists(product_name: str):
+    get_product_request = GetProductRequest()
+    get_product_request.name = product_name
+    delete_product_request = DeleteProductRequest()
+    delete_product_request.name = product_name
+    print(
+        "---delete product from the catalog, if the product already exists---")
+    try:
+        ProductServiceClient().get_product(get_product_request)
+        ProductServiceClient().delete_product(delete_product_request)
+    except NotFound as e:
+        print(e.message)
+
+
+def create_bucket(bucket_name: str):
+    """Create a new bucket in Cloud Storage"""
+    print("Creating new bucket:" + bucket_name)
+    buckets_in_your_project = str(list_buckets())
+    if bucket_name in buckets_in_your_project:
+        print("Bucket {} already exists".format(bucket_name))
+    else:
+        storage_client = storage.Client()
+        bucket = storage_client.bucket(bucket_name)
+        bucket.storage_class = "STANDARD"
+        new_bucket = storage_client.create_bucket(bucket, location="us")
+        print(
+            "Created bucket {} in {} with storage class {}".format(
+                new_bucket.name, new_bucket.location, new_bucket.storage_class
+            )
+        )
+        return new_bucket
+
+
+def delete_bucket(bucket_name: str):
+    """Delete a bucket from Cloud Storage"""
+    storage_client = storage.Client()
+    print("Deleting bucket name:" + bucket_name)
+    buckets_in_your_project = str(list_buckets())
+    if bucket_name in buckets_in_your_project:
+        blobs = storage_client.list_blobs(bucket_name)
+        for blob in blobs:
+            blob.delete()
+        bucket = storage_client.get_bucket(bucket_name)
+        bucket.delete()
+        print("Bucket {} is deleted".format(bucket.name))
+    else:
+        print("Bucket {} is not found".format(bucket_name))
+
+
+def list_buckets():
+    """Lists all buckets"""
+    bucket_list = []
+    storage_client = storage.Client()
+    buckets = storage_client.list_buckets()
+    for bucket in buckets:
+        bucket_list.append(str(bucket))
+    return bucket_list
+
+
+def upload_blob(bucket_name, source_file_name):
+    """Uploads a file to the bucket."""
+    # The path to your file to upload
+    # source_file_name = "local/path/to/file"
+    print("Uploading data from {} to the bucket {}".format(source_file_name,
+                                                           bucket_name))
+    storage_client = storage.Client()
+    bucket = storage_client.bucket(bucket_name)
+    object_name = re.search('resources/(.*?)$', source_file_name).group(1)
+    blob = bucket.blob(object_name)
+    blob.upload_from_filename(source_file_name)
+
+    print(
+        "File {} uploaded to {}.".format(
+            source_file_name, object_name
+        )
+    )
+
+
+def create_bq_dataset(dataset_name):
+    """Create a BigQuery dataset"""
+    full_dataset_id = f"{project_id}.{dataset_name}"
+    bq = bigquery.Client()
+    print(f"Creating dataset {full_dataset_id}")
+    try:
+        bq.get_dataset(full_dataset_id)
+        print(f"dataset {full_dataset_id} already exists")
+    except NotFound:
+        # Construct a Dataset object to send to the API.
+        dataset = bigquery.Dataset(full_dataset_id)
+        dataset.location = "US"
+        bq.create_dataset(dataset)
+        print("dataset is created")
+
+
+def create_bq_table(dataset, table_name, schema_file_path):
+    """Create a BigQuery table"""
+    full_table_id = f"{project_id}.{dataset}.{table_name}"
+    bq = bigquery.Client()
+    print(f"Creating BigQuery table {full_table_id}")
+    try:
+        bq.get_table(full_table_id)
+        print(f"table {full_table_id} already exists")
+    except NotFound:
+        # Construct a Table object to send to the API.
+        with open(schema_file_path, "rb") as schema:
+            schema_dict = json.load(schema)
+            table = bigquery.Table(full_table_id, schema=schema_dict)
+        bq.create_table(table)
+        print("table is created")
+
+
+def upload_data_to_bq_table(dataset, table_name, source, schema_file_path):
+    """Upload data to the table from the specified source file"""
+    full_table_id = f"{project_id}.{dataset}.{table_name}"
+    bq = bigquery.Client()
+    print(f"Uploading data from {source} to the table {full_table_id}")
+    with open(schema_file_path, "rb") as schema:
+        schema_dict = json.load(schema)
+        job_config = bigquery.LoadJobConfig(source_format=bigquery.SourceFormat.NEWLINE_DELIMITED_JSON, schema=schema_dict)
+    with open(source, "rb") as source_file:
+        job = bq.load_table_from_file(source_file, full_table_id, job_config=job_config)
+    job.result()  # Waits for the job to complete.
+ print("data was uploaded") diff --git a/generated_samples/interactive-tutorials/test_resources_recovery/create_test_resources.py b/generated_samples/interactive-tutorials/test_resources_recovery/create_test_resources.py index 604a22cf06ac..36807af308ca 100644 --- a/generated_samples/interactive-tutorials/test_resources_recovery/create_test_resources.py +++ b/generated_samples/interactive-tutorials/test_resources_recovery/create_test_resources.py @@ -28,7 +28,7 @@ project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] products_bucket_name = os.environ['BUCKET_NAME'] events_bucket_name = os.environ['EVENTS_BUCKET_NAME'] -project_id = os.environ["GOOGLE_CLOUD_PROJECT_ID"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] product_resource_file = "../resources/products.json" events_source_file = "../resources/user_events.json" @@ -41,8 +41,7 @@ events_schema = "../resources/events_schema.json" object_name = re.search('resources/(.*?)$', product_resource_file).group(1) -default_catalog = "projects/{0}/locations/global/catalogs/default_catalog/branches/default_branch".format( - project_number) +default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch" storage_client = storage.Client() @@ -120,7 +119,7 @@ def import_products_from_gcs(): while not gcs_operation.done(): print("Please wait till operation is completed") - time.sleep(5) + time.sleep(30) print("Import products operation is completed") if gcs_operation.metadata is not None: @@ -138,22 +137,21 @@ def import_products_from_gcs(): def create_bq_dataset(dataset_name): """Create a BigQuery dataset""" print("Creating dataset {}".format(dataset_name)) - if dataset_name not in list_bq_datasets(): + try: + list_bq_dataset(project_id, dataset_name) + print("dataset {} already exists".format(dataset_name)) + except subprocess.CalledProcessError: create_dataset_command = 'bq --location=US mk -d --default_table_expiration 3600 --description "This is my dataset." 
{}:{}'.format( project_id, dataset_name) - output = subprocess.check_output(shlex.split(create_dataset_command)) - print(output) + subprocess.check_output(shlex.split(create_dataset_command)) print("dataset is created") - else: - print("dataset {} already exists".format(dataset_name)) -def list_bq_datasets(): - """List BigQuery datasets in the project""" - list_dataset_command = "bq ls --project_id {}".format(project_id) - list_output = subprocess.check_output(shlex.split(list_dataset_command)) - datasets = re.split(r'\W+', str(list_output)) - return datasets +def list_bq_dataset(project_id: str, dataset_name: str): + """List BigQuery dataset in the project""" + list_dataset_command = f"bq show {project_id}:{dataset_name}" + dataset_name = subprocess.check_output(shlex.split(list_dataset_command)) + return str(dataset_name) def create_bq_table(dataset, table_name, schema): @@ -180,7 +178,7 @@ def list_bq_tables(dataset): def upload_data_to_bq_table(dataset, table_name, source, schema): """Upload data to the table from specified source file""" - print("Uploading data form {} to the table {}.{}".format(source, dataset, + print("Uploading data from {} to the table {}.{}".format(source, dataset, table_name)) upload_data_command = "bq load --source_format=NEWLINE_DELIMITED_JSON {}:{}.{} {} {}".format( project_id, dataset, table_name, source, schema) diff --git a/generated_samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py b/generated_samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py index 41981ba2c519..0877b4bc2040 100644 --- a/generated_samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py +++ b/generated_samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py @@ -24,15 +24,14 @@ ProductServiceClient project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] product_bucket_name = os.environ['BUCKET_NAME'] events_bucket_name = os.environ['EVENTS_BUCKET_NAME'] -project_id = os.environ["GOOGLE_CLOUD_PROJECT_ID"] product_dataset = "products" events_dataset = "user_events" -default_catalog = "projects/{0}/locations/global/catalogs/default_catalog/branches/default_branch".format( - project_number) +default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch" storage_client = storage.Client()
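For reference, a minimal sketch of how the new import samples can be run locally once this change is applied, assuming the environment variables described in README.md and TEST_RESOURCES_SETUP_CLEANUP.md are set; the values below are placeholders, not real project settings:

```bash
export GOOGLE_CLOUD_PROJECT_NUMBER=<your_project_number>
export GOOGLE_CLOUD_PROJECT=<your_project_id>

cd generated_samples/interactive-tutorials/product
# Create a bucket and upload products.json; the script prints the bucket name.
python setup/products_create_gcs_bucket.py
# Point the import sample at the bucket created above.
export BUCKET_NAME=<bucket_name_printed_by_the_setup_script>
python import_products_gcs.py
```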