From b22b4ed56dfdddb818749240db8f1a279482f975 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Wed, 16 Oct 2019 18:04:09 +0300 Subject: [PATCH 01/14] refactor(bigquery): update code samples --- bigquery/docs/snippets.py | 428 ------------------ bigquery/samples/client_query.py | 3 +- bigquery/samples/client_query_add_column.py | 54 +++ bigquery/samples/client_query_batch.py | 45 ++ .../samples/client_query_destination_table.py | 46 ++ .../client_query_destination_table_cmek.py | 50 ++ .../client_query_destination_table_legacy.py | 51 +++ bigquery/samples/client_query_dry_run.py | 43 ++ bigquery/samples/client_query_legacy_sql.py | 44 ++ bigquery/samples/client_query_relax_column.py | 57 +++ bigquery/samples/copy_table.py | 4 +- bigquery/samples/copy_table_cmek.py | 50 ++ .../samples/copy_table_multiple_source.py | 39 ++ bigquery/samples/load_table_dataframe.py | 2 +- .../tests/test_client_query_add_column.py | 33 ++ ..._routine.py => test_client_query_batch.py} | 8 +- ...=> test_client_query_destination_table.py} | 9 +- ...st_client_query_destination_table_cmek.py} | 14 +- ...t_client_query_destination_table_legacy.py | 25 + .../tests/test_client_query_dry_run.py | 25 + ...ine.py => test_client_query_legacy_sql.py} | 8 +- .../tests/test_client_query_relax_column.py | 33 ++ ...st_routines.py => test_copy_table_cmek.py} | 9 +- .../tests/test_copy_table_multiple_source.py | 51 +++ ...routine_ddl.py => test_routine_samples.py} | 47 +- 25 files changed, 720 insertions(+), 458 deletions(-) create mode 100644 bigquery/samples/client_query_add_column.py create mode 100644 bigquery/samples/client_query_batch.py create mode 100644 bigquery/samples/client_query_destination_table.py create mode 100644 bigquery/samples/client_query_destination_table_cmek.py create mode 100644 bigquery/samples/client_query_destination_table_legacy.py create mode 100644 bigquery/samples/client_query_dry_run.py create mode 100644 bigquery/samples/client_query_legacy_sql.py create mode 100644 bigquery/samples/client_query_relax_column.py create mode 100644 bigquery/samples/copy_table_cmek.py create mode 100644 bigquery/samples/copy_table_multiple_source.py create mode 100644 bigquery/samples/tests/test_client_query_add_column.py rename bigquery/samples/tests/{test_create_routine.py => test_client_query_batch.py} (74%) rename bigquery/samples/tests/{test_update_routine.py => test_client_query_destination_table.py} (65%) rename bigquery/samples/tests/{test_get_routine.py => test_client_query_destination_table_cmek.py} (64%) create mode 100644 bigquery/samples/tests/test_client_query_destination_table_legacy.py create mode 100644 bigquery/samples/tests/test_client_query_dry_run.py rename bigquery/samples/tests/{test_delete_routine.py => test_client_query_legacy_sql.py} (75%) create mode 100644 bigquery/samples/tests/test_client_query_relax_column.py rename bigquery/samples/tests/{test_list_routines.py => test_copy_table_cmek.py} (71%) create mode 100644 bigquery/samples/tests/test_copy_table_multiple_source.py rename bigquery/samples/tests/{test_create_routine_ddl.py => test_routine_samples.py} (62%) diff --git a/bigquery/docs/snippets.py b/bigquery/docs/snippets.py index f76c645660bb..6c36b8b73478 100644 --- a/bigquery/docs/snippets.py +++ b/bigquery/docs/snippets.py @@ -1226,106 +1226,6 @@ def test_load_table_relax_column(client, to_delete): assert table.num_rows > 0 -def test_copy_table_multiple_source(client, to_delete): - dest_dataset_id = "dest_dataset_{}".format(_millis()) - dest_dataset = 
bigquery.Dataset(client.dataset(dest_dataset_id)) - dest_dataset.location = "US" - dest_dataset = client.create_dataset(dest_dataset) - to_delete.append(dest_dataset) - - source_dataset_id = "source_dataset_{}".format(_millis()) - source_dataset = bigquery.Dataset(client.dataset(source_dataset_id)) - source_dataset.location = "US" - source_dataset = client.create_dataset(source_dataset) - to_delete.append(source_dataset) - - schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - - table_data = {"table1": b"Washington,WA", "table2": b"California,CA"} - for table_id, data in table_data.items(): - table_ref = source_dataset.table(table_id) - job_config = bigquery.LoadJobConfig() - job_config.schema = schema - body = six.BytesIO(data) - client.load_table_from_file( - body, - table_ref, - # Location must match that of the destination dataset. - location="US", - job_config=job_config, - ).result() - - # [START bigquery_copy_table_multiple_source] - # from google.cloud import bigquery - # client = bigquery.Client() - # source_dataset_id = 'my_source_dataset' - # dest_dataset_id = 'my_destination_dataset' - - table1_ref = client.dataset(source_dataset_id).table("table1") - table2_ref = client.dataset(source_dataset_id).table("table2") - dest_table_ref = client.dataset(dest_dataset_id).table("destination_table") - - job = client.copy_table( - [table1_ref, table2_ref], - dest_table_ref, - # Location must match that of the source and destination tables. - location="US", - ) # API request - job.result() # Waits for job to complete. - - assert job.state == "DONE" - dest_table = client.get_table(dest_table_ref) # API request - assert dest_table.num_rows > 0 - # [END bigquery_copy_table_multiple_source] - - assert dest_table.num_rows == 2 - - -def test_copy_table_cmek(client, to_delete): - dataset_id = "copy_table_cmek_{}".format(_millis()) - dest_dataset = bigquery.Dataset(client.dataset(dataset_id)) - dest_dataset.location = "US" - dest_dataset = client.create_dataset(dest_dataset) - to_delete.append(dest_dataset) - - # [START bigquery_copy_table_cmek] - # from google.cloud import bigquery - # client = bigquery.Client() - - source_dataset = bigquery.DatasetReference("bigquery-public-data", "samples") - source_table_ref = source_dataset.table("shakespeare") - - # dataset_id = 'my_dataset' - dest_dataset_ref = client.dataset(dataset_id) - dest_table_ref = dest_dataset_ref.table("destination_table") - - # Set the encryption key to use for the destination. - # TODO: Replace this key with a key you have created in KMS. - kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" - ) - encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) - job_config = bigquery.CopyJobConfig() - job_config.destination_encryption_configuration = encryption_config - - job = client.copy_table( - source_table_ref, - dest_table_ref, - # Location must match that of the source and destination tables. - location="US", - job_config=job_config, - ) # API request - job.result() # Waits for job to complete. 
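# job.result() blocks until the copy job finishes. A non-blocking
# alternative (a sketch, not part of the original snippet) is to poll
# job.done(), which refreshes the job state via the API:
#
#     import time
#     while not job.done():
#         time.sleep(1)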
- - assert job.state == "DONE" - dest_table = client.get_table(dest_table_ref) - assert dest_table.encryption_configuration.kms_key_name == kms_key_name - # [END bigquery_copy_table_cmek] - - def test_extract_table(client, to_delete): bucket_name = "extract_shakespeare_{}".format(_millis()) storage_client = storage.Client() @@ -1492,35 +1392,6 @@ def test_undelete_table(client, to_delete): # [END bigquery_undelete_table] -def test_client_query_legacy_sql(client): - """Run a query with Legacy SQL explicitly set""" - # [START bigquery_query_legacy] - # from google.cloud import bigquery - # client = bigquery.Client() - - query = ( - "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] " - 'WHERE state = "TX" ' - "LIMIT 100" - ) - - # Set use_legacy_sql to True to use legacy SQL syntax. - job_config = bigquery.QueryJobConfig() - job_config.use_legacy_sql = True - - query_job = client.query( - query, - # Location must match that of the dataset(s) referenced in the query. - location="US", - job_config=job_config, - ) # API request - starts the query - - # Print the results. - for row in query_job: # API request - fetches results - print(row) - # [END bigquery_query_legacy] - - def test_client_query_total_rows(client, capsys): """Run a query and just check for how many rows.""" # [START bigquery_query_total_rows] @@ -1585,273 +1456,6 @@ def test_manage_job(client): # [END bigquery_get_job] -def test_client_query_destination_table(client, to_delete): - """Run a query""" - dataset_id = "query_destination_table_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - to_delete.append(dataset_ref) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - client.create_dataset(dataset) - - # [START bigquery_query_destination_table] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'your_dataset_id' - - job_config = bigquery.QueryJobConfig() - # Set the destination table - table_ref = client.dataset(dataset_id).table("your_table_id") - job_config.destination = table_ref - sql = """ - SELECT corpus - FROM `bigquery-public-data.samples.shakespeare` - GROUP BY corpus; - """ - - # Start the query, passing in the extra configuration. - query_job = client.query( - sql, - # Location must match that of the dataset(s) referenced in the query - # and of the destination table. - location="US", - job_config=job_config, - ) # API request - starts the query - - query_job.result() # Waits for the query to finish - print("Query results loaded to table {}".format(table_ref.path)) - # [END bigquery_query_destination_table] - - -def test_client_query_destination_table_legacy(client, to_delete): - dataset_id = "query_destination_table_legacy_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - to_delete.append(dataset_ref) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - client.create_dataset(dataset) - - # [START bigquery_query_legacy_large_results] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'your_dataset_id' - - job_config = bigquery.QueryJobConfig() - # Set use_legacy_sql to True to use legacy SQL syntax. - job_config.use_legacy_sql = True - # Set the destination table - table_ref = client.dataset(dataset_id).table("your_table_id") - job_config.destination = table_ref - job_config.allow_large_results = True - sql = """ - SELECT corpus - FROM [bigquery-public-data:samples.shakespeare] - GROUP BY corpus; - """ - # Start the query, passing in the extra configuration. 
- query_job = client.query( - sql, - # Location must match that of the dataset(s) referenced in the query - # and of the destination table. - location="US", - job_config=job_config, - ) # API request - starts the query - - query_job.result() # Waits for the query to finish - print("Query results loaded to table {}".format(table_ref.path)) - # [END bigquery_query_legacy_large_results] - - -def test_client_query_destination_table_cmek(client, to_delete): - """Run a query""" - dataset_id = "query_destination_table_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - to_delete.append(dataset_ref) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - client.create_dataset(dataset) - - # [START bigquery_query_destination_table_cmek] - # from google.cloud import bigquery - # client = bigquery.Client() - - job_config = bigquery.QueryJobConfig() - - # Set the destination table. Here, dataset_id is a string, such as: - # dataset_id = 'your_dataset_id' - table_ref = client.dataset(dataset_id).table("your_table_id") - job_config.destination = table_ref - - # Set the encryption key to use for the destination. - # TODO: Replace this key with a key you have created in KMS. - kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" - ) - encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) - job_config.destination_encryption_configuration = encryption_config - - # Start the query, passing in the extra configuration. - query_job = client.query( - "SELECT 17 AS my_col;", - # Location must match that of the dataset(s) referenced in the query - # and of the destination table. - location="US", - job_config=job_config, - ) # API request - starts the query - query_job.result() - - # The destination table is written using the encryption configuration. - table = client.get_table(table_ref) - assert table.encryption_configuration.kms_key_name == kms_key_name - # [END bigquery_query_destination_table_cmek] - - -def test_client_query_batch(client, to_delete): - # [START bigquery_query_batch] - # from google.cloud import bigquery - # client = bigquery.Client() - - job_config = bigquery.QueryJobConfig() - # Run at batch priority, which won't count toward concurrent rate limit. - job_config.priority = bigquery.QueryPriority.BATCH - sql = """ - SELECT corpus - FROM `bigquery-public-data.samples.shakespeare` - GROUP BY corpus; - """ - # Location must match that of the dataset(s) referenced in the query. - location = "US" - - # API request - starts the query - query_job = client.query(sql, location=location, job_config=job_config) - - # Check on the progress by getting the job's updated state. Once the state - # is `DONE`, the results are ready. 
- query_job = client.get_job( - query_job.job_id, location=location - ) # API request - fetches job - print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) - # [END bigquery_query_batch] - - -def test_client_query_relax_column(client, to_delete): - dataset_id = "query_relax_column_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - table_ref = dataset_ref.table("my_table") - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - table = client.create_table(bigquery.Table(table_ref, schema=schema)) - - # [START bigquery_relax_column_query_append] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - - # Retrieves the destination table and checks the number of required fields - table_id = "my_table" - table_ref = dataset_ref.table(table_id) - table = client.get_table(table_ref) - original_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) - # In this example, the existing table has 2 required fields - print("{} fields in the schema are required.".format(original_required_fields)) - - # Configures the query to append the results to a destination table, - # allowing field relaxation - job_config = bigquery.QueryJobConfig() - job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION - ] - job_config.destination = table_ref - job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND - - query_job = client.query( - # In this example, the existing table contains 'full_name' and 'age' as - # required columns, but the query results will omit the second column. - 'SELECT "Beyonce" as full_name;', - # Location must match that of the dataset(s) referenced in the query - # and of the destination table. 
- location="US", - job_config=job_config, - ) # API request - starts the query - - query_job.result() # Waits for the query to finish - print("Query job {} complete.".format(query_job.job_id)) - - # Checks the updated number of required fields - table = client.get_table(table) - current_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) - print("{} fields in the schema are now required.".format(current_required_fields)) - # [END bigquery_relax_column_query_append] - assert original_required_fields - current_required_fields > 0 - assert len(table.schema) == 2 - assert table.schema[1].mode == "NULLABLE" - assert table.num_rows > 0 - - -def test_client_query_add_column(client, to_delete): - dataset_id = "query_add_column_{}".format(_millis()) - dataset_ref = client.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - dataset.location = "US" - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - table_ref = dataset_ref.table("my_table") - schema = [ - bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), - bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), - ] - table = client.create_table(bigquery.Table(table_ref, schema=schema)) - - # [START bigquery_add_column_query_append] - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_ref = client.dataset('my_dataset') - - # Retrieves the destination table and checks the length of the schema - table_id = "my_table" - table_ref = dataset_ref.table(table_id) - table = client.get_table(table_ref) - print("Table {} contains {} columns.".format(table_id, len(table.schema))) - - # Configures the query to append the results to a destination table, - # allowing field addition - job_config = bigquery.QueryJobConfig() - job_config.schema_update_options = [ - bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION - ] - job_config.destination = table_ref - job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND - - query_job = client.query( - # In this example, the existing table contains only the 'full_name' and - # 'age' columns, while the results of this query will contain an - # additional 'favorite_color' column. - 'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;', - # Location must match that of the dataset(s) referenced in the query - # and of the destination table. - location="US", - job_config=job_config, - ) # API request - starts the query - - query_job.result() # Waits for the query to finish - print("Query job {} complete.".format(query_job.job_id)) - - # Checks the updated length of the schema - table = client.get_table(table) - print("Table {} now contains {} columns.".format(table_id, len(table.schema))) - # [END bigquery_add_column_query_append] - assert len(table.schema) == 3 - assert table.num_rows > 0 - - def test_client_query_w_named_params(client, capsys): """Run a query using named query parameters""" @@ -2042,38 +1646,6 @@ def test_client_query_w_struct_params(client, capsys): assert "foo" in out -def test_client_query_dry_run(client): - """Run a dry run query""" - - # [START bigquery_query_dry_run] - # from google.cloud import bigquery - # client = bigquery.Client() - - job_config = bigquery.QueryJobConfig() - job_config.dry_run = True - job_config.use_query_cache = False - query_job = client.query( - ( - "SELECT name, COUNT(*) as name_count " - "FROM `bigquery-public-data.usa_names.usa_1910_2013` " - "WHERE state = 'WA' " - "GROUP BY name" - ), - # Location must match that of the dataset(s) referenced in the query. 
- location="US", - job_config=job_config, - ) # API request - - # A dry run query completes immediately. - assert query_job.state == "DONE" - assert query_job.dry_run - - print("This query will process {} bytes.".format(query_job.total_bytes_processed)) - # [END bigquery_query_dry_run] - - assert query_job.total_bytes_processed > 0 - - def test_query_no_cache(client): # [START bigquery_query_no_cache] # from google.cloud import bigquery diff --git a/bigquery/samples/client_query.py b/bigquery/samples/client_query.py index 9dccfd38cbcf..b2cf0c8637f2 100644 --- a/bigquery/samples/client_query.py +++ b/bigquery/samples/client_query.py @@ -31,7 +31,8 @@ def client_query(client): LIMIT 20 """ query_job = client.query( - query, location="US" # Must match the destination dataset(s) location. + query, + location="US", # Must match the source and the destination dataset(s) location. ) # Make an API request. print("The query data:") diff --git a/bigquery/samples/client_query_add_column.py b/bigquery/samples/client_query_add_column.py new file mode 100644 index 000000000000..c26cbe96622d --- /dev/null +++ b/bigquery/samples/client_query_add_column.py @@ -0,0 +1,54 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_add_column(client, table_id): + + # [START bigquery_add_column_query_append] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the destination table. + # table_id = "your-project.your_dataset.your_table_name" + + # Retrieves the destination table and checks the length of the schema. + table = client.get_table(table_id) # Make an API request. + print("Table {} contains {} columns".format(table_id, len(table.schema))) + + # Configures the query to append the results to a destination table, + # allowing field addition. + job_config = bigquery.QueryJobConfig() + job_config.schema_update_options = [ + bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION + ] + job_config.destination = table_id + job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND + + # Start the query, passing in the extra configuration. + query_job = client.query( + # In this example, the existing table contains only the 'full_name' and + # 'age' columns, while the results of this query will contain an + # additional 'favorite_color' column. + 'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;', + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) # Make an API request. + query_job.result() # Wait for the job to complete. + + # Checks the updated length of the schema. + table = client.get_table(table_id) # Make an API request. 
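# An illustrative extra check one might add here (assumes the
# 'favorite_color' column from the query above; not part of the
# original sample):
#
#     assert "favorite_color" in [field.name for field in table.schema]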
+ print("Table {} now contains {} columns".format(table_id, len(table.schema))) + # [END bigquery_add_column_query_append] diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py new file mode 100644 index 000000000000..683d647f6ee6 --- /dev/null +++ b/bigquery/samples/client_query_batch.py @@ -0,0 +1,45 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_batch(client): + + # [START bigquery_query_batch] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + job_config = bigquery.QueryJobConfig() + + # Run at batch priority, which won't count toward concurrent rate limit. + job_config.priority = bigquery.QueryPriority.BATCH + sql = """ + SELECT corpus + FROM `bigquery-public-data.samples.shakespeare` + GROUP BY corpus; + """ + + # Start the query, passing in the extra configuration. + query_job = client.query( + sql, location="US", job_config=job_config + ) # Make an API request. + + # Check on the progress by getting the job's updated state. Once the state + # is `DONE`, the results are ready. + query_job = client.get_job(query_job.job_id, location="US") # Make an API request. + + print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) + # [END bigquery_query_batch] + return query_job.job_id diff --git a/bigquery/samples/client_query_destination_table.py b/bigquery/samples/client_query_destination_table.py new file mode 100644 index 000000000000..471667780c41 --- /dev/null +++ b/bigquery/samples/client_query_destination_table.py @@ -0,0 +1,46 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_destination_table(client, table_id): + + # [START bigquery_query_destination_table] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the destination table. + # table_id = "your-project.your_dataset.your_table_name" + + job_config = bigquery.QueryJobConfig() + + table = client.get_table(table_id) # Make an API request. + job_config.destination = table + sql = """ + SELECT corpus + FROM `bigquery-public-data.samples.shakespeare` + GROUP BY corpus; + """ + + # Start the query, passing in the extra configuration. + query_job = client.query( + sql, + location="US", # Must match the source and the destination dataset(s) location. 
+ job_config=job_config, + ) # Make an API request. + query_job.result() # Wait for the job to complete. + + print("Query results loaded to the table {}".format(table_id)) + # [END bigquery_query_destination_table] diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py new file mode 100644 index 000000000000..e5ebfd87b7c2 --- /dev/null +++ b/bigquery/samples/client_query_destination_table_cmek.py @@ -0,0 +1,50 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_destination_table_cmek(client, table_id): + + # [START bigquery_query_destination_table_cmek] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the destination table. + # table_id = "your-project.your_dataset.your_table_name" + + job_config = bigquery.QueryJobConfig() + job_config.destination = table_id + + # Set the encryption key to use for the destination. + # TODO(developer): Replace this key with a key you have created in KMS. + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) + + encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) + job_config.destination_encryption_configuration = encryption_config + + # Start the query, passing in the extra configuration. + query_job = client.query( + "SELECT 17 AS my_col;", + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) # Make an API request. + query_job.result() # Wait for the job to complete. + + table = client.get_table(table_id) # Make an API request. + if table.encryption_configuration.kms_key_name == kms_key_name: + print("The destination table is written using the encryption configuration") + # [END bigquery_query_destination_table_cmek] diff --git a/bigquery/samples/client_query_destination_table_legacy.py b/bigquery/samples/client_query_destination_table_legacy.py new file mode 100644 index 000000000000..7d30c20f1d6f --- /dev/null +++ b/bigquery/samples/client_query_destination_table_legacy.py @@ -0,0 +1,51 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
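# Note: allow_large_results (used below) only applies to legacy SQL. In
# standard SQL, writing large results just requires setting a destination
# table; a minimal sketch under that assumption:
#
#     job_config = bigquery.QueryJobConfig()
#     job_config.destination = table_id
#     client.query(sql, location="US", job_config=job_config).result()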
+ + +def client_query_destination_table_legacy(client, table_id): + + # [START bigquery_query_legacy_large_results] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the destination table. + # table_id = "your-project.your_dataset.your_table_name" + + job_config = bigquery.QueryJobConfig() + + # Set use_legacy_sql to True to use legacy SQL syntax. + job_config.use_legacy_sql = True + + # Set the destination table. + table = client.get_table(table_id) # Make an API request. + job_config.destination = table + job_config.allow_large_results = True + sql = """ + SELECT corpus + FROM [bigquery-public-data:samples.shakespeare] + GROUP BY corpus; + """ + + # Start the query, passing in the extra configuration. + query_job = client.query( + sql, + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) # Make an API request. + query_job.result() # Wait for the job to complete. + + print("Query results loaded to the table {}".format(table_id)) + # [END bigquery_query_legacy_large_results] diff --git a/bigquery/samples/client_query_dry_run.py b/bigquery/samples/client_query_dry_run.py new file mode 100644 index 000000000000..4094d5de4dcf --- /dev/null +++ b/bigquery/samples/client_query_dry_run.py @@ -0,0 +1,43 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_dry_run(client): + + # [START bigquery_query_dry_run] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + job_config = bigquery.QueryJobConfig() + job_config.dry_run = True + job_config.use_query_cache = False + + # Start the query, passing in the extra configuration. + query_job = client.query( + ( + "SELECT name, COUNT(*) as name_count " + "FROM `bigquery-public-data.usa_names.usa_1910_2013` " + "WHERE state = 'WA' " + "GROUP BY name" + ), + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) # Make an API request. + + # A dry run query completes immediately. + print("This query will process {} bytes.".format(query_job.total_bytes_processed)) + # [END bigquery_query_dry_run] + return query_job diff --git a/bigquery/samples/client_query_legacy_sql.py b/bigquery/samples/client_query_legacy_sql.py new file mode 100644 index 000000000000..8400a9acc60a --- /dev/null +++ b/bigquery/samples/client_query_legacy_sql.py @@ -0,0 +1,44 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_legacy_sql(client): + + # [START bigquery_query_legacy] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + query = ( + "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] " + 'WHERE state = "TX" ' + "LIMIT 100" + ) + + # Set use_legacy_sql to True to use legacy SQL syntax. + job_config = bigquery.QueryJobConfig() + job_config.use_legacy_sql = True + + # Start the query, passing in the extra configuration. + query_job = client.query( + query, + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) # Make an API request. + + print("The query data:") + for row in query_job: + print(row) + # [END bigquery_query_legacy] diff --git a/bigquery/samples/client_query_relax_column.py b/bigquery/samples/client_query_relax_column.py new file mode 100644 index 000000000000..4b3a5080df6c --- /dev/null +++ b/bigquery/samples/client_query_relax_column.py @@ -0,0 +1,57 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query_relax_column(client, table_id): + + # [START bigquery_relax_column_query_append] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the destination table. + # table_id = "your-project.your_dataset.your_table_name" + + # Retrieves the destination table and checks the number of required fields. + table = client.get_table(table_id) # Make an API request. + original_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) + + # In this example, the existing table has 2 required fields. + print("{} fields in the schema are required.".format(original_required_fields)) + + # Configures the query to append the results to a destination table, + # allowing field relaxation. + job_config = bigquery.QueryJobConfig() + job_config.schema_update_options = [ + bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION + ] + job_config.destination = table + job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND + + # Start the query, passing in the extra configuration. + query_job = client.query( + # In this example, the existing table contains 'full_name' and 'age' as + # required columns, but the query results will omit the second column. + 'SELECT "Beyonce" as full_name;', + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) # Make an API request. 
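# Note: ALLOW_FIELD_RELAXATION only loosens REQUIRED columns to NULLABLE;
# it does not remove them. Rows appended by this query get NULL for the
# omitted 'age' column.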
+ query_job.result() # Wait for the job to complete. + + # Checks the updated number of required fields. + table = client.get_table(table_id) # Make an API request. + current_required_fields = sum(field.mode == "REQUIRED" for field in table.schema) + print("{} fields in the schema are now required.".format(current_required_fields)) + # [END bigquery_relax_column_query_append] diff --git a/bigquery/samples/copy_table.py b/bigquery/samples/copy_table.py index f6ebd91470eb..28fb5ceb0cb8 100644 --- a/bigquery/samples/copy_table.py +++ b/bigquery/samples/copy_table.py @@ -31,9 +31,9 @@ def copy_table(client, source_table_id, destination_table_id): job = client.copy_table( source_table_id, destination_table_id, - location="US", # Must match the source and destination tables location. + location="US", # Must match the source and the destination dataset(s) location. ) - job.result() # Waits for job to complete. + job.result() # Wait for the job to complete. print("A copy of the table created.") # [END bigquery_copy_table] diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py new file mode 100644 index 000000000000..2ee2381888c4 --- /dev/null +++ b/bigquery/samples/copy_table_cmek.py @@ -0,0 +1,50 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def copy_table_cmek(client, dest_table_id, orig_table_id): + + # [START bigquery_copy_table_cmek] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dest_table_id to the ID of the destination table. + # dest_table_id = "your-project.your_dataset.your_table_name" + + # TODO(developer): Set orig_table_id to the ID of the original table. + # orig_table_id = "your-project.your_dataset.your_table_name" + + # Set the encryption key to use for the destination. + # TODO(developer): Replace this key with a key you have created in KMS. + kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + "cloud-samples-tests", "us-central1", "test", "test" + ) + + encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) + job_config = bigquery.CopyJobConfig() + job_config.destination_encryption_configuration = encryption_config + job = client.copy_table( + orig_table_id, + dest_table_id, + location="US", # Must match the source and the destination dataset(s) location. + job_config=job_config, + ) + job.result() # Wait for the job to complete. + + dest_table = client.get_table(dest_table_id) # Make an API request. 
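# Confirm that the copied table was written with the CMEK key
# configured above.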
+ if dest_table.encryption_configuration.kms_key_name == kms_key_name: + print("A copy of the table created") + # [END bigquery_copy_table_cmek] diff --git a/bigquery/samples/copy_table_multiple_source.py b/bigquery/samples/copy_table_multiple_source.py new file mode 100644 index 000000000000..e178e3d747b8 --- /dev/null +++ b/bigquery/samples/copy_table_multiple_source.py @@ -0,0 +1,39 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def copy_table_multiple_source(client, dest_table_id, tables_ids): + + # [START bigquery_copy_table_multiple_source] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dest_table_id to the ID of the destination table. + # dest_table_id = "your-project.your_dataset.your_table_name" + + # TODO(developer): Set tables_ids to the list of the IDs of the original tables. + # tables_ids = ["your-project.your_dataset.your_table_name", ...] + + job = client.copy_table( + tables_ids, + dest_table_id, + location="US", # Must match the source and the destination dataset(s) location. + ) # Make an API request. + job.result() # Wait for the job to complete. + + print("A copy of {} tables has been created".format(len(tables_ids))) + # [END bigquery_copy_table_multiple_source] diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py index ea6fe5d02384..7133b0bfbcc1 100644 --- a/bigquery/samples/load_table_dataframe.py +++ b/bigquery/samples/load_table_dataframe.py @@ -64,7 +64,7 @@ def load_table_dataframe(client, table_id): dataframe, table_id, job_config=job_config, - location="US", # Must match the destination dataset location. + location="US", # Must match the source and the destination dataset(s) location. ) # Make an API request. job.result() # Wait for the job to complete. diff --git a/bigquery/samples/tests/test_client_query_add_column.py b/bigquery/samples/tests/test_client_query_add_column.py new file mode 100644 index 000000000000..7fe68f237a87 --- /dev/null +++ b/bigquery/samples/tests/test_client_query_add_column.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. 
import client_query_add_column + + +def test_client_query_add_column(capsys, client, random_table_id): + + schema = [ + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + + client.create_table(bigquery.Table(random_table_id, schema=schema)) + + client_query_add_column.client_query_add_column(client, random_table_id) + out, err = capsys.readouterr() + assert "Table {} contains 2 columns".format(random_table_id) in out + assert "Table {} now contains 3 columns".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_create_routine.py b/bigquery/samples/tests/test_client_query_batch.py similarity index 74% rename from bigquery/samples/tests/test_create_routine.py rename to bigquery/samples/tests/test_client_query_batch.py index 7220d63542e2..ceafff0687a3 100644 --- a/bigquery/samples/tests/test_create_routine.py +++ b/bigquery/samples/tests/test_client_query_batch.py @@ -13,11 +13,11 @@ # limitations under the License. -from .. import create_routine +from .. import client_query_batch -def test_create_routine(capsys, client, random_routine_id): +def test_client_query_batch(capsys, client): - create_routine.create_routine(client, random_routine_id) + job_id = client_query_batch.client_query_batch(client) out, err = capsys.readouterr() - assert "Created routine {}".format(random_routine_id) in out + assert "Job {} is currently in state DONE".format(job_id) in out diff --git a/bigquery/samples/tests/test_update_routine.py b/bigquery/samples/tests/test_client_query_destination_table.py similarity index 65% rename from bigquery/samples/tests/test_update_routine.py rename to bigquery/samples/tests/test_client_query_destination_table.py index 8adfab32e032..1c3dd2c5c38b 100644 --- a/bigquery/samples/tests/test_update_routine.py +++ b/bigquery/samples/tests/test_client_query_destination_table.py @@ -13,10 +13,11 @@ # limitations under the License. -from .. import update_routine +from .. import client_query_destination_table -def test_update_routine(client, routine_id): +def test_client_query_destination_table(capsys, client, table_id): - routine = update_routine.update_routine(client, routine_id) - assert routine.body == "x * 4" + client_query_destination_table.client_query_destination_table(client, table_id) + out, err = capsys.readouterr() + assert "Query results loaded to the table {}".format(table_id) in out diff --git a/bigquery/samples/tests/test_get_routine.py b/bigquery/samples/tests/test_client_query_destination_table_cmek.py similarity index 64% rename from bigquery/samples/tests/test_get_routine.py rename to bigquery/samples/tests/test_client_query_destination_table_cmek.py index fa5f3093116c..975f27bf5279 100644 --- a/bigquery/samples/tests/test_get_routine.py +++ b/bigquery/samples/tests/test_client_query_destination_table_cmek.py @@ -13,15 +13,13 @@ # limitations under the License. -from .. import get_routine +from .. 
import client_query_destination_table_cmek -def test_get_routine(capsys, client, routine_id): +def test_client_query_destination_table_cmek(capsys, client, random_table_id): - get_routine.get_routine(client, routine_id) + client_query_destination_table_cmek.client_query_destination_table_cmek( + client, random_table_id + ) out, err = capsys.readouterr() - assert "Routine '{}':".format(routine_id) in out - assert "Type: 'SCALAR_FUNCTION'" in out - assert "Language: 'SQL'" in out - assert "Name: 'x'" in out - assert "Type: 'type_kind: INT64\n'" in out + assert "The destination table is written using the encryption configuration" in out diff --git a/bigquery/samples/tests/test_client_query_destination_table_legacy.py b/bigquery/samples/tests/test_client_query_destination_table_legacy.py new file mode 100644 index 000000000000..1e9dcc96d057 --- /dev/null +++ b/bigquery/samples/tests/test_client_query_destination_table_legacy.py @@ -0,0 +1,25 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import client_query_destination_table_legacy + + +def test_client_query_destination_table_legacy(capsys, client, table_id): + + client_query_destination_table_legacy.client_query_destination_table_legacy( + client, table_id + ) + out, err = capsys.readouterr() + assert "Query results loaded to the table {}".format(table_id) in out diff --git a/bigquery/samples/tests/test_client_query_dry_run.py b/bigquery/samples/tests/test_client_query_dry_run.py new file mode 100644 index 000000000000..2da94ae9f633 --- /dev/null +++ b/bigquery/samples/tests/test_client_query_dry_run.py @@ -0,0 +1,25 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import client_query_dry_run + + +def test_client_query_dry_run(capsys, client): + + query_job = client_query_dry_run.client_query_dry_run(client) + out, err = capsys.readouterr() + assert "This query will process 65935918 bytes." in out + assert query_job.state == "DONE" + assert query_job.dry_run diff --git a/bigquery/samples/tests/test_delete_routine.py b/bigquery/samples/tests/test_client_query_legacy_sql.py similarity index 75% rename from bigquery/samples/tests/test_delete_routine.py rename to bigquery/samples/tests/test_client_query_legacy_sql.py index 9347d1e22dc2..2ae544e1ecdb 100644 --- a/bigquery/samples/tests/test_delete_routine.py +++ b/bigquery/samples/tests/test_client_query_legacy_sql.py @@ -13,11 +13,11 @@ # limitations under the License. -from .. import delete_routine +from .. 
import client_query_legacy_sql -def test_delete_routine(capsys, client, routine_id): +def test_client_query_legacy_sql(capsys, client): - delete_routine.delete_routine(client, routine_id) + client_query_legacy_sql.client_query_legacy_sql(client) out, err = capsys.readouterr() - assert "Deleted routine {}.".format(routine_id) in out + assert "Row(('Frances',), {'name': 0})" in out diff --git a/bigquery/samples/tests/test_client_query_relax_column.py b/bigquery/samples/tests/test_client_query_relax_column.py new file mode 100644 index 000000000000..4db50da07376 --- /dev/null +++ b/bigquery/samples/tests/test_client_query_relax_column.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. import client_query_relax_column + + +def test_client_query_relax_column(capsys, client, random_table_id): + + schema = [ + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + + client.create_table(bigquery.Table(random_table_id, schema=schema)) + + client_query_relax_column.client_query_relax_column(client, random_table_id) + out, err = capsys.readouterr() + assert "2 fields in the schema are required." in out + assert "0 fields in the schema are now required." in out diff --git a/bigquery/samples/tests/test_list_routines.py b/bigquery/samples/tests/test_copy_table_cmek.py similarity index 71% rename from bigquery/samples/tests/test_list_routines.py rename to bigquery/samples/tests/test_copy_table_cmek.py index e249238e1976..4123a658ac41 100644 --- a/bigquery/samples/tests/test_list_routines.py +++ b/bigquery/samples/tests/test_copy_table_cmek.py @@ -13,12 +13,11 @@ # limitations under the License. -from .. import list_routines +from .. import copy_table_cmek -def test_list_routines(capsys, client, dataset_id, routine_id): +def test_copy_table_cmek(capsys, client, random_table_id, table_with_data_id): - list_routines.list_routines(client, dataset_id) + copy_table_cmek.copy_table_cmek(client, random_table_id, table_with_data_id) out, err = capsys.readouterr() - assert "Routines contained in dataset {}:".format(dataset_id) in out - assert routine_id in out + assert "A copy of the table created" in out diff --git a/bigquery/samples/tests/test_copy_table_multiple_source.py b/bigquery/samples/tests/test_copy_table_multiple_source.py new file mode 100644 index 000000000000..81918802a037 --- /dev/null +++ b/bigquery/samples/tests/test_copy_table_multiple_source.py @@ -0,0 +1,51 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +import six +from google.cloud import bigquery + +from .. import copy_table_multiple_source + + +def test_copy_table_multiple_source(capsys, client, random_table_id, random_dataset_id): + + schema = [ + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + ] + + dataset = bigquery.Dataset(random_dataset_id) + dataset.location = "US" + dataset = client.create_dataset(dataset) + table_data = {"table1": b"Washington,WA", "table2": b"California,CA"} + for table_id, data in table_data.items(): + table_ref = dataset.table(table_id) + job_config = bigquery.LoadJobConfig() + job_config.schema = schema + body = six.BytesIO(data) + client.load_table_from_file( + body, table_ref, location="US", job_config=job_config + ).result() + + tables_ids = [ + "{}.table1".format(random_dataset_id), + "{}.table2".format(random_dataset_id), + ] + + copy_table_multiple_source.copy_table_multiple_source( + client, random_table_id, tables_ids + ) + out, err = capsys.readouterr() + assert "A copy of 2 tables has been created" in out diff --git a/bigquery/samples/tests/test_create_routine_ddl.py b/bigquery/samples/tests/test_routine_samples.py similarity index 62% rename from bigquery/samples/tests/test_create_routine_ddl.py rename to bigquery/samples/tests/test_routine_samples.py index bcb3249d26ef..a3e82abcc7ea 100644 --- a/bigquery/samples/tests/test_create_routine_ddl.py +++ b/bigquery/samples/tests/test_routine_samples.py @@ -16,15 +16,24 @@ from google.cloud import bigquery from google.cloud import bigquery_v2 -from .. import create_routine_ddl + +def test_create_routine(capsys, client, random_routine_id): + from .. import create_routine + + create_routine.create_routine(client, random_routine_id) + out, err = capsys.readouterr() + assert "Created routine {}".format(random_routine_id) in out def test_create_routine_ddl(capsys, client, random_routine_id): + from .. import create_routine_ddl create_routine_ddl.create_routine_ddl(client, random_routine_id) routine = client.get_routine(random_routine_id) out, err = capsys.readouterr() + assert "Created routine {}".format(random_routine_id) in out + return routine assert routine.type_ == "SCALAR_FUNCTION" assert routine.language == "SQL" expected_arguments = [ @@ -55,3 +64,39 @@ def test_create_routine_ddl(capsys, client, random_routine_id): ) ] assert routine.arguments == expected_arguments + + +def test_list_routines(capsys, client, dataset_id, routine_id): + from .. import list_routines + + list_routines.list_routines(client, dataset_id) + out, err = capsys.readouterr() + assert "Routines contained in dataset {}:".format(dataset_id) in out + assert routine_id in out + + +def test_get_routine(capsys, client, routine_id): + from .. import get_routine + + get_routine.get_routine(client, routine_id) + out, err = capsys.readouterr() + assert "Routine '{}':".format(routine_id) in out + assert "Type: 'SCALAR_FUNCTION'" in out + assert "Language: 'SQL'" in out + assert "Name: 'x'" in out + assert "Type: 'type_kind: INT64\n'" in out + + +def test_delete_routine(capsys, client, routine_id): + from .. import delete_routine + + delete_routine.delete_routine(client, routine_id) + out, err = capsys.readouterr() + assert "Deleted routine {}.".format(routine_id) in out + + +def test_update_routine(client, routine_id): + from .. 
import update_routine + + routine = update_routine.update_routine(client, routine_id) + assert routine.body == "x * 4" From 44c3e659b630f5897ae861c1b94a60767375161e Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Fri, 18 Oct 2019 13:53:37 +0300 Subject: [PATCH 02/14] kms_key_name update --- bigquery/samples/client_query_destination_table_cmek.py | 2 +- bigquery/samples/copy_table_cmek.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py index e5ebfd87b7c2..e83a43882b7f 100644 --- a/bigquery/samples/client_query_destination_table_cmek.py +++ b/bigquery/samples/client_query_destination_table_cmek.py @@ -30,7 +30,7 @@ def client_query_destination_table_cmek(client, table_id): # Set the encryption key to use for the destination. # TODO(developer): Replace this key with a key you have created in KMS. kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py index 2ee2381888c4..8f117e043670 100644 --- a/bigquery/samples/copy_table_cmek.py +++ b/bigquery/samples/copy_table_cmek.py @@ -30,7 +30,7 @@ def copy_table_cmek(client, dest_table_id, orig_table_id): # Set the encryption key to use for the destination. # TODO(developer): Replace this key with a key you have created in KMS. kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) From 45d90dee645b9e4066daa3f82118d010affcba04 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Fri, 18 Oct 2019 13:53:37 +0300 Subject: [PATCH 03/14] refactor(bigquery): kms_key_name update --- bigquery/samples/client_query_destination_table_cmek.py | 2 +- bigquery/samples/copy_table_cmek.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py index e5ebfd87b7c2..e83a43882b7f 100644 --- a/bigquery/samples/client_query_destination_table_cmek.py +++ b/bigquery/samples/client_query_destination_table_cmek.py @@ -30,7 +30,7 @@ def client_query_destination_table_cmek(client, table_id): # Set the encryption key to use for the destination. # TODO(developer): Replace this key with a key you have created in KMS. kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py index 2ee2381888c4..8f117e043670 100644 --- a/bigquery/samples/copy_table_cmek.py +++ b/bigquery/samples/copy_table_cmek.py @@ -30,7 +30,7 @@ def copy_table_cmek(client, dest_table_id, orig_table_id): # Set the encryption key to use for the destination. # TODO(developer): Replace this key with a key you have created in KMS. 
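# Note: the Cloud KMS key must live in a location compatible with the
# BigQuery dataset location; for datasets in the "US" multi-region that
# means a key ring in the "us" multi-region rather than "us-central1".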
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) From 5af72c871db9c0c2023d3466cea40ad3b444d85a Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Mon, 21 Oct 2019 12:43:57 +0300 Subject: [PATCH 04/14] samples update --- bigquery/samples/client_query_batch.py | 2 +- .../samples/client_query_destination_table_cmek.py | 14 +++++++------- bigquery/samples/copy_table_cmek.py | 8 ++++---- bigquery/samples/copy_table_multiple_source.py | 10 +++++----- bigquery/samples/tests/conftest.py | 5 +++++ bigquery/samples/tests/test_client_query_batch.py | 4 ++-- .../test_client_query_destination_table_cmek.py | 6 ++++-- .../samples/tests/test_client_query_dry_run.py | 2 +- .../samples/tests/test_client_query_legacy_sql.py | 2 +- bigquery/samples/tests/test_copy_table_cmek.py | 8 ++++++-- .../tests/test_copy_table_multiple_source.py | 9 ++++++--- 11 files changed, 42 insertions(+), 28 deletions(-) diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py index 683d647f6ee6..9dab2b70a78e 100644 --- a/bigquery/samples/client_query_batch.py +++ b/bigquery/samples/client_query_batch.py @@ -42,4 +42,4 @@ def client_query_batch(client): print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) # [END bigquery_query_batch] - return query_job.job_id + return query_job diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py index e83a43882b7f..a8749ba35384 100644 --- a/bigquery/samples/client_query_destination_table_cmek.py +++ b/bigquery/samples/client_query_destination_table_cmek.py @@ -13,7 +13,7 @@ # limitations under the License. -def client_query_destination_table_cmek(client, table_id): +def client_query_destination_table_cmek(client, table_id, kms_key_name): # [START bigquery_query_destination_table_cmek] from google.cloud import bigquery @@ -24,14 +24,14 @@ def client_query_destination_table_cmek(client, table_id): # TODO(developer): Set table_id to the ID of the destination table. # table_id = "your-project.your_dataset.your_table_name" - job_config = bigquery.QueryJobConfig() - job_config.destination = table_id - # Set the encryption key to use for the destination. # TODO(developer): Replace this key with a key you have created in KMS. - kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us", "test", "test" - ) + # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + # your-project, location, your-ring, your-key + # ) + + job_config = bigquery.QueryJobConfig() + job_config.destination = table_id encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py index 8f117e043670..49a98140c6ac 100644 --- a/bigquery/samples/copy_table_cmek.py +++ b/bigquery/samples/copy_table_cmek.py @@ -13,7 +13,7 @@ # limitations under the License. 
-def copy_table_cmek(client, dest_table_id, orig_table_id): +def copy_table_cmek(client, dest_table_id, orig_table_id, kms_key_name): # [START bigquery_copy_table_cmek] from google.cloud import bigquery @@ -29,9 +29,9 @@ def copy_table_cmek(client, dest_table_id, orig_table_id): # Set the encryption key to use for the destination. # TODO(developer): Replace this key with a key you have created in KMS. - kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us", "test", "test" - ) + # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( + # your-project, location, your-ring, your-key + # ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config = bigquery.CopyJobConfig() diff --git a/bigquery/samples/copy_table_multiple_source.py b/bigquery/samples/copy_table_multiple_source.py index e178e3d747b8..6f54d592b7ca 100644 --- a/bigquery/samples/copy_table_multiple_source.py +++ b/bigquery/samples/copy_table_multiple_source.py @@ -13,7 +13,7 @@ # limitations under the License. -def copy_table_multiple_source(client, dest_table_id, tables_ids): +def copy_table_multiple_source(client, dest_table_id, table_ids): # [START bigquery_copy_table_multiple_source] # TODO(developer): Import the client library. @@ -25,15 +25,15 @@ def copy_table_multiple_source(client, dest_table_id, tables_ids): # TODO(developer): Set dest_table_id to the ID of the destination table. # dest_table_id = "your-project.your_dataset.your_table_name" - # TODO(developer): Set tables_ids to the list of the IDs of the original tables. - # tables_ids = ["your-project.your_dataset.your_table_name", ...] + # TODO(developer): Set table_ids to the list of the IDs of the original tables. + # table_ids = ["your-project.your_dataset.your_table_name", ...] job = client.copy_table( - tables_ids, + table_ids, dest_table_id, location="US", # Must match the source and the destination dataset(s) location. ) # Make an API request. job.result() # Wait for the job to complete. 
- print("A copy of {} tables has been created".format(len(tables_ids))) + print("The tables {} have been appended to {}".format(table_ids, dest_table_id)) # [END bigquery_copy_table_multiple_source] diff --git a/bigquery/samples/tests/conftest.py b/bigquery/samples/tests/conftest.py index 32b23931aa91..91f0a105f808 100644 --- a/bigquery/samples/tests/conftest.py +++ b/bigquery/samples/tests/conftest.py @@ -132,3 +132,8 @@ def model_id(client, dataset_id): client.query(sql).result() return model_id + + +@pytest.fixture +def kms_key_name(): + return "projects/cloud-samples-tests/locations/us/keyRings/test/cryptoKeys/test" diff --git a/bigquery/samples/tests/test_client_query_batch.py b/bigquery/samples/tests/test_client_query_batch.py index ceafff0687a3..59839d2268ad 100644 --- a/bigquery/samples/tests/test_client_query_batch.py +++ b/bigquery/samples/tests/test_client_query_batch.py @@ -18,6 +18,6 @@ def test_client_query_batch(capsys, client): - job_id = client_query_batch.client_query_batch(client) + job = client_query_batch.client_query_batch(client) out, err = capsys.readouterr() - assert "Job {} is currently in state DONE".format(job_id) in out + assert "Job {} is currently in state {}".format(job.job_id, job.state) in out diff --git a/bigquery/samples/tests/test_client_query_destination_table_cmek.py b/bigquery/samples/tests/test_client_query_destination_table_cmek.py index 975f27bf5279..8c38f9626dfa 100644 --- a/bigquery/samples/tests/test_client_query_destination_table_cmek.py +++ b/bigquery/samples/tests/test_client_query_destination_table_cmek.py @@ -16,10 +16,12 @@ from .. import client_query_destination_table_cmek -def test_client_query_destination_table_cmek(capsys, client, random_table_id): +def test_client_query_destination_table_cmek( + capsys, client, random_table_id, kms_key_name +): client_query_destination_table_cmek.client_query_destination_table_cmek( - client, random_table_id + client, random_table_id, kms_key_name ) out, err = capsys.readouterr() assert "The destination table is written using the encryption configuration" in out diff --git a/bigquery/samples/tests/test_client_query_dry_run.py b/bigquery/samples/tests/test_client_query_dry_run.py index 2da94ae9f633..126b44dcd053 100644 --- a/bigquery/samples/tests/test_client_query_dry_run.py +++ b/bigquery/samples/tests/test_client_query_dry_run.py @@ -20,6 +20,6 @@ def test_client_query_dry_run(capsys, client): query_job = client_query_dry_run.client_query_dry_run(client) out, err = capsys.readouterr() - assert "This query will process 65935918 bytes." in out + assert "This query will process 0 bytes." not in out assert query_job.state == "DONE" assert query_job.dry_run diff --git a/bigquery/samples/tests/test_client_query_legacy_sql.py b/bigquery/samples/tests/test_client_query_legacy_sql.py index 2ae544e1ecdb..1c9936ebb80d 100644 --- a/bigquery/samples/tests/test_client_query_legacy_sql.py +++ b/bigquery/samples/tests/test_client_query_legacy_sql.py @@ -20,4 +20,4 @@ def test_client_query_legacy_sql(capsys, client): client_query_legacy_sql.client_query_legacy_sql(client) out, err = capsys.readouterr() - assert "Row(('Frances',), {'name': 0})" in out + assert "Row((" in out diff --git a/bigquery/samples/tests/test_copy_table_cmek.py b/bigquery/samples/tests/test_copy_table_cmek.py index 4123a658ac41..6b2ab8f83638 100644 --- a/bigquery/samples/tests/test_copy_table_cmek.py +++ b/bigquery/samples/tests/test_copy_table_cmek.py @@ -16,8 +16,12 @@ from .. 
import copy_table_cmek
 
 
-def test_copy_table_cmek(capsys, client, random_table_id, table_with_data_id):
+def test_copy_table_cmek(
+    capsys, client, random_table_id, table_with_data_id, kms_key_name
+):
 
-    copy_table_cmek.copy_table_cmek(client, random_table_id, table_with_data_id)
+    copy_table_cmek.copy_table_cmek(
+        client, random_table_id, table_with_data_id, kms_key_name
+    )
     out, err = capsys.readouterr()
     assert "A copy of the table created" in out
diff --git a/bigquery/samples/tests/test_copy_table_multiple_source.py b/bigquery/samples/tests/test_copy_table_multiple_source.py
index 81918802a037..65cc132bf631 100644
--- a/bigquery/samples/tests/test_copy_table_multiple_source.py
+++ b/bigquery/samples/tests/test_copy_table_multiple_source.py
@@ -39,13 +39,16 @@ def test_copy_table_multiple_source(capsys, client, random_table_id, random_data
             body, table_ref, location="US", job_config=job_config
         ).result()
 
-    tables_ids = [
+    table_ids = [
         "{}.table1".format(random_dataset_id),
         "{}.table2".format(random_dataset_id),
     ]
 
     copy_table_multiple_source.copy_table_multiple_source(
-        client, random_table_id, tables_ids
+        client, random_table_id, table_ids
     )
     out, err = capsys.readouterr()
-    assert "A copy of 2 tables has been created" in out
+    assert (
+        "The tables {} have been appended to {}".format(table_ids, random_table_id)
+        in out
+    )

From 1ab9eb7fcec413f48753eca8714c7e94ae3ff4ef Mon Sep 17 00:00:00 2001
From: Leonid Emar-Kar
Date: Mon, 21 Oct 2019 13:43:55 +0300
Subject: [PATCH 05/14] docs section fix

---
 bigquery/docs/usage/encryption.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bigquery/docs/usage/encryption.rst b/bigquery/docs/usage/encryption.rst
index 88d23067995e..a4bc3118a803 100644
--- a/bigquery/docs/usage/encryption.rst
+++ b/bigquery/docs/usage/encryption.rst
@@ -36,7 +36,7 @@ Cloud KMS for the destination table.
 Copy a table, using a customer-managed encryption key from Cloud KMS for the
 destination table.
 
-.. literalinclude:: ../snippets.py
+.. literalinclude:: ../../samples/copy_table_cmek.py
    :language: python
    :dedent: 4
    :start-after: [START bigquery_copy_table_cmek]
@@ -45,7 +45,7 @@ destination table.
 Write query results to a table, using a customer-managed encryption key from
 Cloud KMS for the destination table.
 
-.. literalinclude:: ../snippets.py
+.. literalinclude:: ../../samples/client_query_destination_table_cmek.py
    :language: python
    :dedent: 4
    :start-after: [START bigquery_query_destination_table_cmek]

From 14fbab4f2ed8fe30be30a2a2cdbdddd3ce0e5331 Mon Sep 17 00:00:00 2001
From: Leonid Emar-Kar
Date: Mon, 21 Oct 2019 13:43:55 +0300
Subject: [PATCH 06/14] docs section fix

---
 bigquery/docs/usage/encryption.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/bigquery/docs/usage/encryption.rst b/bigquery/docs/usage/encryption.rst
index 88d23067995e..a4bc3118a803 100644
--- a/bigquery/docs/usage/encryption.rst
+++ b/bigquery/docs/usage/encryption.rst
@@ -36,7 +36,7 @@ Cloud KMS for the destination table.
 Copy a table, using a customer-managed encryption key from Cloud KMS for the
 destination table.
 
-.. literalinclude:: ../snippets.py
+.. literalinclude:: ../../samples/copy_table_cmek.py
    :language: python
    :dedent: 4
    :start-after: [START bigquery_copy_table_cmek]
@@ -45,7 +45,7 @@ destination table.
 Write query results to a table, using a customer-managed encryption key from
 Cloud KMS for the destination table.
 
-.. literalinclude:: ../snippets.py
+..
literalinclude:: ../../samples/client_query_destination_table_cmek.py :language: python :dedent: 4 :start-after: [START bigquery_query_destination_table_cmek] From f15033c2349d902e5946dd2a896a3829f076ae69 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Mon, 21 Oct 2019 15:04:30 +0300 Subject: [PATCH 07/14] update docs --- bigquery/docs/usage/encryption.rst | 4 ++-- bigquery/docs/usage/queries.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery/docs/usage/encryption.rst b/bigquery/docs/usage/encryption.rst index a4bc3118a803..b512e6c4d7bf 100644 --- a/bigquery/docs/usage/encryption.rst +++ b/bigquery/docs/usage/encryption.rst @@ -36,7 +36,7 @@ Cloud KMS for the destination table. Copy a table, using a customer-managed encryption key from Cloud KMS for the destination table. -.. literalinclude:: ../../samples/copy_table_cmek.py +.. literalinclude:: ../samples/copy_table_cmek.py :language: python :dedent: 4 :start-after: [START bigquery_copy_table_cmek] @@ -45,7 +45,7 @@ destination table. Write query results to a table, using a customer-managed encryption key from Cloud KMS for the destination table. -.. literalinclude:: ../../samples/client_query_destination_table_cmek.py +.. literalinclude:: ../samples/client_query_destination_table_cmek.py :language: python :dedent: 4 :start-after: [START bigquery_query_destination_table_cmek] diff --git a/bigquery/docs/usage/queries.rst b/bigquery/docs/usage/queries.rst index 1f0720e47f1a..9c97d37bc1c9 100644 --- a/bigquery/docs/usage/queries.rst +++ b/bigquery/docs/usage/queries.rst @@ -17,7 +17,7 @@ Run a query and wait for it to finish with the Run a dry run query ^^^^^^^^^^^^^^^^^^^ -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/client_query_dry_run.py :language: python :dedent: 4 :start-after: [START bigquery_query_dry_run] @@ -30,7 +30,7 @@ Writing query results to a destination table See BigQuery documentation for more information on `writing query results `_. -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/client_query_destination_table.py :language: python :dedent: 4 :start-after: [START bigquery_query_destination_table] From bfa3ec2bf5dc5e52b34f3de7385c287fb8968201 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Thu, 24 Oct 2019 13:32:58 +0300 Subject: [PATCH 08/14] assertion update with re --- bigquery/samples/tests/test_client_query_legacy_sql.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bigquery/samples/tests/test_client_query_legacy_sql.py b/bigquery/samples/tests/test_client_query_legacy_sql.py index 1c9936ebb80d..0ba902da0894 100644 --- a/bigquery/samples/tests/test_client_query_legacy_sql.py +++ b/bigquery/samples/tests/test_client_query_legacy_sql.py @@ -13,6 +13,8 @@ # limitations under the License. +import re + from .. import client_query_legacy_sql @@ -20,4 +22,4 @@ def test_client_query_legacy_sql(capsys, client): client_query_legacy_sql.client_query_legacy_sql(client) out, err = capsys.readouterr() - assert "Row((" in out + assert re.search(r"(Row[\w(){}:', ]+)$", out) From 0e50ead5aef211c6ae63ecee2266359a6843e89f Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Sat, 19 Oct 2019 19:55:04 +0300 Subject: [PATCH 09/14] docs(bigquery): remove location parameter from samples The parameter can sometimes confuse new BigQuery developers. Since location autodetection now works pretty well, the parameter can be removed from code samples for better clarity, except where the samples want to explicitly demonstrate its usage. 
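
For illustration, a minimal sketch of the pattern the samples move to (not
part of this patch; the public dataset below is a stand-in): client.query()
is called without a location argument, and BigQuery infers the job location
from the dataset(s) the query references:

    from google.cloud import bigquery

    client = bigquery.Client()

    # No explicit location argument: BigQuery resolves the job location
    # from the referenced dataset.
    query_job = client.query(
        "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 5"
    )
    for row in query_job:  # Iterating the job waits for the query to finish.
        print(row.name)

An explicit location= argument remains accepted by client.query() and the
job-lookup methods for the cases called out above.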
--- bigquery/samples/client_query.py | 5 +---- bigquery/samples/client_query_add_column.py | 1 - bigquery/samples/client_query_batch.py | 6 ++---- bigquery/samples/client_query_destination_table.py | 6 +----- bigquery/samples/client_query_destination_table_cmek.py | 4 +--- bigquery/samples/client_query_destination_table_legacy.py | 6 +----- bigquery/samples/client_query_dry_run.py | 1 - bigquery/samples/client_query_legacy_sql.py | 6 +----- bigquery/samples/client_query_relax_column.py | 1 - bigquery/samples/copy_table.py | 6 +----- bigquery/samples/copy_table_cmek.py | 7 +------ bigquery/samples/copy_table_multiple_source.py | 6 +----- bigquery/samples/load_table_dataframe.py | 5 +---- bigquery/samples/tests/test_create_job.py | 2 +- 14 files changed, 12 insertions(+), 50 deletions(-) diff --git a/bigquery/samples/client_query.py b/bigquery/samples/client_query.py index b2cf0c8637f2..5242c854e220 100644 --- a/bigquery/samples/client_query.py +++ b/bigquery/samples/client_query.py @@ -30,10 +30,7 @@ def client_query(client): ORDER BY total_people DESC LIMIT 20 """ - query_job = client.query( - query, - location="US", # Must match the source and the destination dataset(s) location. - ) # Make an API request. + query_job = client.query(query) # Make an API request. print("The query data:") for row in query_job: diff --git a/bigquery/samples/client_query_add_column.py b/bigquery/samples/client_query_add_column.py index c26cbe96622d..e2724eb300b1 100644 --- a/bigquery/samples/client_query_add_column.py +++ b/bigquery/samples/client_query_add_column.py @@ -43,7 +43,6 @@ def client_query_add_column(client, table_id): # 'age' columns, while the results of this query will contain an # additional 'favorite_color' column. 'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;', - location="US", # Must match the source and the destination dataset(s) location. job_config=job_config, ) # Make an API request. query_job.result() # Wait for the job to complete. diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py index 9dab2b70a78e..e9de54512cca 100644 --- a/bigquery/samples/client_query_batch.py +++ b/bigquery/samples/client_query_batch.py @@ -32,13 +32,11 @@ def client_query_batch(client): """ # Start the query, passing in the extra configuration. - query_job = client.query( - sql, location="US", job_config=job_config - ) # Make an API request. + query_job = client.query(sql, job_config=job_config) # Make an API request. # Check on the progress by getting the job's updated state. Once the state # is `DONE`, the results are ready. - query_job = client.get_job(query_job.job_id, location="US") # Make an API request. + query_job = client.get_job(query_job.job_id) # Make an API request. print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) # [END bigquery_query_batch] diff --git a/bigquery/samples/client_query_destination_table.py b/bigquery/samples/client_query_destination_table.py index 471667780c41..f40ce3a5afc1 100644 --- a/bigquery/samples/client_query_destination_table.py +++ b/bigquery/samples/client_query_destination_table.py @@ -35,11 +35,7 @@ def client_query_destination_table(client, table_id): """ # Start the query, passing in the extra configuration. - query_job = client.query( - sql, - location="US", # Must match the source and the destination dataset(s) location. - job_config=job_config, - ) # Make an API request. + query_job = client.query(sql, job_config=job_config) # Make an API request. 
query_job.result() # Wait for the job to complete. print("Query results loaded to the table {}".format(table_id)) diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py index a8749ba35384..06c02038ba7a 100644 --- a/bigquery/samples/client_query_destination_table_cmek.py +++ b/bigquery/samples/client_query_destination_table_cmek.py @@ -38,9 +38,7 @@ def client_query_destination_table_cmek(client, table_id, kms_key_name): # Start the query, passing in the extra configuration. query_job = client.query( - "SELECT 17 AS my_col;", - location="US", # Must match the source and the destination dataset(s) location. - job_config=job_config, + "SELECT 17 AS my_col;", job_config=job_config ) # Make an API request. query_job.result() # Wait for the job to complete. diff --git a/bigquery/samples/client_query_destination_table_legacy.py b/bigquery/samples/client_query_destination_table_legacy.py index 7d30c20f1d6f..88c38b79e839 100644 --- a/bigquery/samples/client_query_destination_table_legacy.py +++ b/bigquery/samples/client_query_destination_table_legacy.py @@ -40,11 +40,7 @@ def client_query_destination_table_legacy(client, table_id): """ # Start the query, passing in the extra configuration. - query_job = client.query( - sql, - location="US", # Must match the source and the destination dataset(s) location. - job_config=job_config, - ) # Make an API request. + query_job = client.query(sql, job_config=job_config) # Make an API request. query_job.result() # Wait for the job to complete. print("Query results loaded to the table {}".format(table_id)) diff --git a/bigquery/samples/client_query_dry_run.py b/bigquery/samples/client_query_dry_run.py index 4094d5de4dcf..8b6f018a5a90 100644 --- a/bigquery/samples/client_query_dry_run.py +++ b/bigquery/samples/client_query_dry_run.py @@ -33,7 +33,6 @@ def client_query_dry_run(client): "WHERE state = 'WA' " "GROUP BY name" ), - location="US", # Must match the source and the destination dataset(s) location. job_config=job_config, ) # Make an API request. diff --git a/bigquery/samples/client_query_legacy_sql.py b/bigquery/samples/client_query_legacy_sql.py index 8400a9acc60a..f9e2c69b6ae4 100644 --- a/bigquery/samples/client_query_legacy_sql.py +++ b/bigquery/samples/client_query_legacy_sql.py @@ -32,11 +32,7 @@ def client_query_legacy_sql(client): job_config.use_legacy_sql = True # Start the query, passing in the extra configuration. - query_job = client.query( - query, - location="US", # Must match the source and the destination dataset(s) location. - job_config=job_config, - ) # Make an API request. + query_job = client.query(query, job_config=job_config) # Make an API request. print("The query data:") for row in query_job: diff --git a/bigquery/samples/client_query_relax_column.py b/bigquery/samples/client_query_relax_column.py index 4b3a5080df6c..48c264a16c68 100644 --- a/bigquery/samples/client_query_relax_column.py +++ b/bigquery/samples/client_query_relax_column.py @@ -45,7 +45,6 @@ def client_query_relax_column(client, table_id): # In this example, the existing table contains 'full_name' and 'age' as # required columns, but the query results will omit the second column. 'SELECT "Beyonce" as full_name;', - location="US", # Must match the source and the destination dataset(s) location. job_config=job_config, ) # Make an API request. query_job.result() # Wait for the job to complete. 
diff --git a/bigquery/samples/copy_table.py b/bigquery/samples/copy_table.py index 28fb5ceb0cb8..20f6776cf87d 100644 --- a/bigquery/samples/copy_table.py +++ b/bigquery/samples/copy_table.py @@ -28,11 +28,7 @@ def copy_table(client, source_table_id, destination_table_id): # TODO(developer): Set destination_table_id to the ID of the destination table. # destination_table_id = "your-project.destination_dataset.destination_table" - job = client.copy_table( - source_table_id, - destination_table_id, - location="US", # Must match the source and the destination dataset(s) location. - ) + job = client.copy_table(source_table_id, destination_table_id) job.result() # Wait for the job to complete. print("A copy of the table created.") diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py index 49a98140c6ac..0aa299084d19 100644 --- a/bigquery/samples/copy_table_cmek.py +++ b/bigquery/samples/copy_table_cmek.py @@ -36,12 +36,7 @@ def copy_table_cmek(client, dest_table_id, orig_table_id, kms_key_name): encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config = bigquery.CopyJobConfig() job_config.destination_encryption_configuration = encryption_config - job = client.copy_table( - orig_table_id, - dest_table_id, - location="US", # Must match the source and the destination dataset(s) location. - job_config=job_config, - ) + job = client.copy_table(orig_table_id, dest_table_id, job_config=job_config) job.result() # Wait for the job to complete. dest_table = client.get_table(dest_table_id) # Make an API request. diff --git a/bigquery/samples/copy_table_multiple_source.py b/bigquery/samples/copy_table_multiple_source.py index 6f54d592b7ca..532ea0a0ab90 100644 --- a/bigquery/samples/copy_table_multiple_source.py +++ b/bigquery/samples/copy_table_multiple_source.py @@ -28,11 +28,7 @@ def copy_table_multiple_source(client, dest_table_id, table_ids): # TODO(developer): Set table_ids to the list of the IDs of the original tables. # table_ids = ["your-project.your_dataset.your_table_name", ...] - job = client.copy_table( - table_ids, - dest_table_id, - location="US", # Must match the source and the destination dataset(s) location. - ) # Make an API request. + job = client.copy_table(table_ids, dest_table_id) # Make an API request. job.result() # Wait for the job to complete. print("The tables {} have been appended to {}".format(table_ids, dest_table_id)) diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py index 7133b0bfbcc1..8cfb34424457 100644 --- a/bigquery/samples/load_table_dataframe.py +++ b/bigquery/samples/load_table_dataframe.py @@ -61,10 +61,7 @@ def load_table_dataframe(client, table_id): ) job = client.load_table_from_dataframe( - dataframe, - table_id, - job_config=job_config, - location="US", # Must match the source and the destination dataset(s) location. + dataframe, table_id, job_config=job_config ) # Make an API request. job.result() # Wait for the job to complete. 
diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py index 5ead51156606..526d7679b1f1 100644 --- a/bigquery/samples/tests/test_create_job.py +++ b/bigquery/samples/tests/test_create_job.py @@ -19,6 +19,6 @@ def test_create_job(capsys, client): query_job = create_job.create_job(client) - client.cancel_job(query_job.job_id, location="US") + client.cancel_job(query_job.job_id) out, err = capsys.readouterr() assert "Started job: {}".format(query_job.job_id) in out From 04555f975fe6f05b694172f74d6712bc3cdacb32 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 25 Oct 2019 21:54:44 +0300 Subject: [PATCH 10/14] Pass location where auto-detection not supported --- bigquery/samples/client_query_batch.py | 4 +++- bigquery/samples/tests/test_create_job.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py index e9de54512cca..73ddd4f83ae2 100644 --- a/bigquery/samples/client_query_batch.py +++ b/bigquery/samples/client_query_batch.py @@ -36,7 +36,9 @@ def client_query_batch(client): # Check on the progress by getting the job's updated state. Once the state # is `DONE`, the results are ready. - query_job = client.get_job(query_job.job_id) # Make an API request. + query_job = client.get_job( + query_job.job_id, location=query_job.location + ) # Make an API request. print("Job {} is currently in state {}".format(query_job.job_id, query_job.state)) # [END bigquery_query_batch] diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py index 526d7679b1f1..3cda34bf0848 100644 --- a/bigquery/samples/tests/test_create_job.py +++ b/bigquery/samples/tests/test_create_job.py @@ -19,6 +19,6 @@ def test_create_job(capsys, client): query_job = create_job.create_job(client) - client.cancel_job(query_job.job_id) + client.cancel_job(query_job.job_id, location=query_job.location) out, err = capsys.readouterr() assert "Started job: {}".format(query_job.job_id) in out From b2cb5a15173af75543641672feec0c53a2e5d2b3 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Tue, 5 Nov 2019 13:48:19 +0300 Subject: [PATCH 11/14] update QueryJobConfig --- bigquery/samples/client_query_add_column.py | 3 +-- bigquery/samples/client_query_batch.py | 7 ++++--- bigquery/samples/client_query_destination_table.py | 4 +--- .../samples/client_query_destination_table_cmek.py | 11 ++++++----- .../client_query_destination_table_legacy.py | 13 +++++-------- bigquery/samples/client_query_dry_run.py | 4 +--- bigquery/samples/client_query_legacy_sql.py | 3 +-- bigquery/samples/client_query_relax_column.py | 3 +-- bigquery/samples/copy_table_cmek.py | 8 +++++--- .../query_external_sheets_permanent_table.py | 1 + .../query_external_sheets_temporary_table.py | 4 ++-- 11 files changed, 28 insertions(+), 33 deletions(-) diff --git a/bigquery/samples/client_query_add_column.py b/bigquery/samples/client_query_add_column.py index e2724eb300b1..1cde370a35ed 100644 --- a/bigquery/samples/client_query_add_column.py +++ b/bigquery/samples/client_query_add_column.py @@ -30,11 +30,10 @@ def client_query_add_column(client, table_id): # Configures the query to append the results to a destination table, # allowing field addition. 
- job_config = bigquery.QueryJobConfig() + job_config = bigquery.QueryJobConfig(destination=table_id) job_config.schema_update_options = [ bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION ] - job_config.destination = table_id job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND # Start the query, passing in the extra configuration. diff --git a/bigquery/samples/client_query_batch.py b/bigquery/samples/client_query_batch.py index 73ddd4f83ae2..af9fcd8a1e40 100644 --- a/bigquery/samples/client_query_batch.py +++ b/bigquery/samples/client_query_batch.py @@ -21,10 +21,11 @@ def client_query_batch(client): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - job_config = bigquery.QueryJobConfig() + job_config = bigquery.QueryJobConfig( + # Run at batch priority, which won't count toward concurrent rate limit. + priority=bigquery.QueryPriority.BATCH + ) - # Run at batch priority, which won't count toward concurrent rate limit. - job_config.priority = bigquery.QueryPriority.BATCH sql = """ SELECT corpus FROM `bigquery-public-data.samples.shakespeare` diff --git a/bigquery/samples/client_query_destination_table.py b/bigquery/samples/client_query_destination_table.py index f40ce3a5afc1..876df7904d9c 100644 --- a/bigquery/samples/client_query_destination_table.py +++ b/bigquery/samples/client_query_destination_table.py @@ -24,10 +24,8 @@ def client_query_destination_table(client, table_id): # TODO(developer): Set table_id to the ID of the destination table. # table_id = "your-project.your_dataset.your_table_name" - job_config = bigquery.QueryJobConfig() + job_config = bigquery.QueryJobConfig(destination=table_id) - table = client.get_table(table_id) # Make an API request. - job_config.destination = table sql = """ SELECT corpus FROM `bigquery-public-data.samples.shakespeare` diff --git a/bigquery/samples/client_query_destination_table_cmek.py b/bigquery/samples/client_query_destination_table_cmek.py index 06c02038ba7a..d3409eecd77d 100644 --- a/bigquery/samples/client_query_destination_table_cmek.py +++ b/bigquery/samples/client_query_destination_table_cmek.py @@ -30,11 +30,12 @@ def client_query_destination_table_cmek(client, table_id, kms_key_name): # your-project, location, your-ring, your-key # ) - job_config = bigquery.QueryJobConfig() - job_config.destination = table_id - - encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) - job_config.destination_encryption_configuration = encryption_config + job_config = bigquery.QueryJobConfig( + destination=table_id, + destination_encryption_configuration=bigquery.EncryptionConfiguration( + kms_key_name=kms_key_name + ), + ) # Start the query, passing in the extra configuration. query_job = client.query( diff --git a/bigquery/samples/client_query_destination_table_legacy.py b/bigquery/samples/client_query_destination_table_legacy.py index 88c38b79e839..8e977a92d996 100644 --- a/bigquery/samples/client_query_destination_table_legacy.py +++ b/bigquery/samples/client_query_destination_table_legacy.py @@ -24,15 +24,12 @@ def client_query_destination_table_legacy(client, table_id): # TODO(developer): Set table_id to the ID of the destination table. # table_id = "your-project.your_dataset.your_table_name" - job_config = bigquery.QueryJobConfig() + # Set the destination table and use_legacy_sql to True to use + # legacy SQL syntax. 
+ job_config = bigquery.QueryJobConfig( + allow_large_results=True, destination=table_id, use_legacy_sql=True + ) - # Set use_legacy_sql to True to use legacy SQL syntax. - job_config.use_legacy_sql = True - - # Set the destination table. - table = client.get_table(table_id) # Make an API request. - job_config.destination = table - job_config.allow_large_results = True sql = """ SELECT corpus FROM [bigquery-public-data:samples.shakespeare] diff --git a/bigquery/samples/client_query_dry_run.py b/bigquery/samples/client_query_dry_run.py index 8b6f018a5a90..2d09a1c25f4a 100644 --- a/bigquery/samples/client_query_dry_run.py +++ b/bigquery/samples/client_query_dry_run.py @@ -21,9 +21,7 @@ def client_query_dry_run(client): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - job_config = bigquery.QueryJobConfig() - job_config.dry_run = True - job_config.use_query_cache = False + job_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False) # Start the query, passing in the extra configuration. query_job = client.query( diff --git a/bigquery/samples/client_query_legacy_sql.py b/bigquery/samples/client_query_legacy_sql.py index f9e2c69b6ae4..c8dae20649e2 100644 --- a/bigquery/samples/client_query_legacy_sql.py +++ b/bigquery/samples/client_query_legacy_sql.py @@ -28,8 +28,7 @@ def client_query_legacy_sql(client): ) # Set use_legacy_sql to True to use legacy SQL syntax. - job_config = bigquery.QueryJobConfig() - job_config.use_legacy_sql = True + job_config = bigquery.QueryJobConfig(use_legacy_sql=True) # Start the query, passing in the extra configuration. query_job = client.query(query, job_config=job_config) # Make an API request. diff --git a/bigquery/samples/client_query_relax_column.py b/bigquery/samples/client_query_relax_column.py index 48c264a16c68..d8e5743c1e33 100644 --- a/bigquery/samples/client_query_relax_column.py +++ b/bigquery/samples/client_query_relax_column.py @@ -33,11 +33,10 @@ def client_query_relax_column(client, table_id): # Configures the query to append the results to a destination table, # allowing field relaxation. - job_config = bigquery.QueryJobConfig() + job_config = bigquery.QueryJobConfig(destination=table_id) job_config.schema_update_options = [ bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION ] - job_config.destination = table job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND # Start the query, passing in the extra configuration. diff --git a/bigquery/samples/copy_table_cmek.py b/bigquery/samples/copy_table_cmek.py index 0aa299084d19..1e9ee198c821 100644 --- a/bigquery/samples/copy_table_cmek.py +++ b/bigquery/samples/copy_table_cmek.py @@ -33,9 +33,11 @@ def copy_table_cmek(client, dest_table_id, orig_table_id, kms_key_name): # your-project, location, your-ring, your-key # ) - encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) - job_config = bigquery.CopyJobConfig() - job_config.destination_encryption_configuration = encryption_config + job_config = bigquery.CopyJobConfig( + destination_encryption_configuration=bigquery.EncryptionConfiguration( + kms_key_name=kms_key_name + ) + ) job = client.copy_table(orig_table_id, dest_table_id, job_config=job_config) job.result() # Wait for the job to complete. 
diff --git a/bigquery/samples/query_external_sheets_permanent_table.py b/bigquery/samples/query_external_sheets_permanent_table.py
index fd4f7577f1b1..1c4e45723ba1 100644
--- a/bigquery/samples/query_external_sheets_permanent_table.py
+++ b/bigquery/samples/query_external_sheets_permanent_table.py
@@ -61,6 +61,7 @@ def query_external_sheets_permanent_table(dataset_id):
 
     # Example query to find states starting with "W".
     sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id)
+
     query_job = client.query(sql)  # Make an API request.
 
     # Wait for the query to complete.
diff --git a/bigquery/samples/query_external_sheets_temporary_table.py b/bigquery/samples/query_external_sheets_temporary_table.py
index 9f17e91a46cc..6549b57de438 100644
--- a/bigquery/samples/query_external_sheets_temporary_table.py
+++ b/bigquery/samples/query_external_sheets_temporary_table.py
@@ -52,11 +52,11 @@ def query_external_sheets_temporary_table():
         "us-states!A20:B49"
     )  # Optionally set range of the sheet to query from.
     table_id = "us_states"
-    job_config = bigquery.QueryJobConfig()
-    job_config.table_definitions = {table_id: external_config}
+    job_config = bigquery.QueryJobConfig(table_definitions={table_id: external_config})
 
     # Example query to find states starting with "W".
     sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id)
+
    query_job = client.query(sql, job_config=job_config)  # Make an API request.
 
     # Wait for the query to complete.

From 1c135d7472c13ac3e770ab2257cb9240b9d2b271 Mon Sep 17 00:00:00 2001
From: Leonid Emar-Kar
Date: Mon, 11 Nov 2019 16:45:58 +0300
Subject: [PATCH 12/14] unify undelete_table sample

---
 bigquery/samples/undelete_table.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/bigquery/samples/undelete_table.py b/bigquery/samples/undelete_table.py
index 2d544cf5aa8c..9db9712d2a74 100644
--- a/bigquery/samples/undelete_table.py
+++ b/bigquery/samples/undelete_table.py
@@ -46,7 +46,7 @@ def undelete_table(client, table_id, recovered_table_id):
     # [END_EXCLUDE]
 
     # "Accidentally" delete the table.
-    client.delete_table(table_id)  # API request
+    client.delete_table(table_id)  # Make an API request.
 
     # Construct the restore-from table ID using a snapshot decorator.
     snapshot_table_id = "{}@{}".format(table_id, snapshot_epoch)
@@ -55,11 +55,11 @@ def undelete_table(client, table_id, recovered_table_id):
     job = client.copy_table(
         snapshot_table_id,
         recovered_table_id,
-        # Location must match that of the source and destination tables.
+        # Must match the source and destination tables' location.
         location="US",
-    )  # API request
+    )  # Make an API request.
 
-    job.result()  # Wait for job to complete.
+    job.result()  # Wait for the job to complete.
print( "Copied data from deleted table {} to {}".format(table_id, recovered_table_id) From 1eb6a2501eed42836604cdafa19063fafbb4c1e3 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Thu, 28 Nov 2019 10:42:00 +0300 Subject: [PATCH 13/14] refactor: update test files with the new conditions --- .../tests/test_client_query_destination_table_legacy.py | 6 +++--- bigquery/samples/tests/test_client_query_dry_run.py | 3 ++- bigquery/samples/tests/test_copy_table_multiple_source.py | 2 ++ 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/bigquery/samples/tests/test_client_query_destination_table_legacy.py b/bigquery/samples/tests/test_client_query_destination_table_legacy.py index 1e9dcc96d057..8c5639f656ac 100644 --- a/bigquery/samples/tests/test_client_query_destination_table_legacy.py +++ b/bigquery/samples/tests/test_client_query_destination_table_legacy.py @@ -16,10 +16,10 @@ from .. import client_query_destination_table_legacy -def test_client_query_destination_table_legacy(capsys, client, table_id): +def test_client_query_destination_table_legacy(capsys, client, random_table_id): client_query_destination_table_legacy.client_query_destination_table_legacy( - client, table_id + client, random_table_id ) out, err = capsys.readouterr() - assert "Query results loaded to the table {}".format(table_id) in out + assert "Query results loaded to the table {}".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_client_query_dry_run.py b/bigquery/samples/tests/test_client_query_dry_run.py index 126b44dcd053..7e9a6715eed2 100644 --- a/bigquery/samples/tests/test_client_query_dry_run.py +++ b/bigquery/samples/tests/test_client_query_dry_run.py @@ -20,6 +20,7 @@ def test_client_query_dry_run(capsys, client): query_job = client_query_dry_run.client_query_dry_run(client) out, err = capsys.readouterr() - assert "This query will process 0 bytes." 
not in out + assert "This query will process" in out assert query_job.state == "DONE" assert query_job.dry_run + assert query_job.total_bytes_processed > 0 diff --git a/bigquery/samples/tests/test_copy_table_multiple_source.py b/bigquery/samples/tests/test_copy_table_multiple_source.py index 65cc132bf631..71c2bc1d8fd9 100644 --- a/bigquery/samples/tests/test_copy_table_multiple_source.py +++ b/bigquery/samples/tests/test_copy_table_multiple_source.py @@ -47,8 +47,10 @@ def test_copy_table_multiple_source(capsys, client, random_table_id, random_data copy_table_multiple_source.copy_table_multiple_source( client, random_table_id, table_ids ) + dest_table = client.get_table(random_table_id) out, err = capsys.readouterr() assert ( "The tables {} have been appended to {}".format(table_ids, random_table_id) in out ) + assert dest_table.num_rows > 0 From 16dd3f8d73b99a9ff8e3ed0b17f86bf09a772807 Mon Sep 17 00:00:00 2001 From: Leonid Emar-Kar Date: Thu, 28 Nov 2019 10:45:53 +0300 Subject: [PATCH 14/14] refactor: remove extra blank line in test files --- bigquery/samples/tests/test_add_empty_column.py | 1 - bigquery/samples/tests/test_browse_table_data.py | 1 - bigquery/samples/tests/test_client_list_jobs.py | 1 - bigquery/samples/tests/test_client_query.py | 1 - bigquery/samples/tests/test_client_query_add_column.py | 1 - bigquery/samples/tests/test_client_query_batch.py | 1 - bigquery/samples/tests/test_client_query_destination_table.py | 1 - .../samples/tests/test_client_query_destination_table_cmek.py | 1 - .../samples/tests/test_client_query_destination_table_legacy.py | 1 - bigquery/samples/tests/test_client_query_dry_run.py | 1 - bigquery/samples/tests/test_client_query_legacy_sql.py | 1 - bigquery/samples/tests/test_client_query_relax_column.py | 1 - bigquery/samples/tests/test_copy_table.py | 1 - bigquery/samples/tests/test_copy_table_cmek.py | 1 - bigquery/samples/tests/test_copy_table_multiple_source.py | 1 - bigquery/samples/tests/test_create_dataset.py | 1 - bigquery/samples/tests/test_create_job.py | 1 - bigquery/samples/tests/test_create_table.py | 1 - bigquery/samples/tests/test_create_table_range_partitioned.py | 1 - bigquery/samples/tests/test_dataset_exists.py | 1 - bigquery/samples/tests/test_dataset_label_samples.py | 1 - bigquery/samples/tests/test_delete_dataset.py | 1 - bigquery/samples/tests/test_delete_table.py | 1 - bigquery/samples/tests/test_get_dataset.py | 1 - bigquery/samples/tests/test_get_table.py | 1 - bigquery/samples/tests/test_list_datasets.py | 1 - bigquery/samples/tests/test_list_datasets_by_label.py | 1 - bigquery/samples/tests/test_list_tables.py | 1 - bigquery/samples/tests/test_load_table_dataframe.py | 1 - bigquery/samples/tests/test_model_samples.py | 1 - .../samples/tests/test_query_external_sheets_permanent_table.py | 1 - .../samples/tests/test_query_external_sheets_temporary_table.py | 1 - bigquery/samples/tests/test_query_to_arrow.py | 1 - bigquery/samples/tests/test_routine_samples.py | 1 - bigquery/samples/tests/test_table_exists.py | 1 - bigquery/samples/tests/test_table_insert_rows.py | 1 - .../tests/test_table_insert_rows_explicit_none_insert_ids.py | 1 - bigquery/samples/tests/test_update_dataset_access.py | 1 - .../tests/test_update_dataset_default_table_expiration.py | 1 - bigquery/samples/tests/test_update_dataset_description.py | 1 - 40 files changed, 40 deletions(-) diff --git a/bigquery/samples/tests/test_add_empty_column.py b/bigquery/samples/tests/test_add_empty_column.py index e6c56e6cbfbc..de51bfed7672 100644 --- 
a/bigquery/samples/tests/test_add_empty_column.py +++ b/bigquery/samples/tests/test_add_empty_column.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import add_empty_column diff --git a/bigquery/samples/tests/test_browse_table_data.py b/bigquery/samples/tests/test_browse_table_data.py index 0e9cc6055494..db9b867f5ab7 100644 --- a/bigquery/samples/tests/test_browse_table_data.py +++ b/bigquery/samples/tests/test_browse_table_data.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import browse_table_data diff --git a/bigquery/samples/tests/test_client_list_jobs.py b/bigquery/samples/tests/test_client_list_jobs.py index 011e081fdee4..ada053239802 100644 --- a/bigquery/samples/tests/test_client_list_jobs.py +++ b/bigquery/samples/tests/test_client_list_jobs.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import client_list_jobs from .. import create_job diff --git a/bigquery/samples/tests/test_client_query.py b/bigquery/samples/tests/test_client_query.py index fd5b8e7edd97..e73e7e5a0eb4 100644 --- a/bigquery/samples/tests/test_client_query.py +++ b/bigquery/samples/tests/test_client_query.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import client_query diff --git a/bigquery/samples/tests/test_client_query_add_column.py b/bigquery/samples/tests/test_client_query_add_column.py index 7fe68f237a87..67ac328d5518 100644 --- a/bigquery/samples/tests/test_client_query_add_column.py +++ b/bigquery/samples/tests/test_client_query_add_column.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import client_query_add_column diff --git a/bigquery/samples/tests/test_client_query_batch.py b/bigquery/samples/tests/test_client_query_batch.py index 59839d2268ad..79197e4565c7 100644 --- a/bigquery/samples/tests/test_client_query_batch.py +++ b/bigquery/samples/tests/test_client_query_batch.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import client_query_batch diff --git a/bigquery/samples/tests/test_client_query_destination_table.py b/bigquery/samples/tests/test_client_query_destination_table.py index 1c3dd2c5c38b..d29aaebd3ce5 100644 --- a/bigquery/samples/tests/test_client_query_destination_table.py +++ b/bigquery/samples/tests/test_client_query_destination_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import client_query_destination_table diff --git a/bigquery/samples/tests/test_client_query_destination_table_cmek.py b/bigquery/samples/tests/test_client_query_destination_table_cmek.py index 8c38f9626dfa..cd4532be6d1d 100644 --- a/bigquery/samples/tests/test_client_query_destination_table_cmek.py +++ b/bigquery/samples/tests/test_client_query_destination_table_cmek.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. 
import client_query_destination_table_cmek diff --git a/bigquery/samples/tests/test_client_query_destination_table_legacy.py b/bigquery/samples/tests/test_client_query_destination_table_legacy.py index 8c5639f656ac..da62baada213 100644 --- a/bigquery/samples/tests/test_client_query_destination_table_legacy.py +++ b/bigquery/samples/tests/test_client_query_destination_table_legacy.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import client_query_destination_table_legacy diff --git a/bigquery/samples/tests/test_client_query_dry_run.py b/bigquery/samples/tests/test_client_query_dry_run.py index 7e9a6715eed2..c39a22767d25 100644 --- a/bigquery/samples/tests/test_client_query_dry_run.py +++ b/bigquery/samples/tests/test_client_query_dry_run.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import client_query_dry_run diff --git a/bigquery/samples/tests/test_client_query_legacy_sql.py b/bigquery/samples/tests/test_client_query_legacy_sql.py index 0ba902da0894..fb6ee60bc6ec 100644 --- a/bigquery/samples/tests/test_client_query_legacy_sql.py +++ b/bigquery/samples/tests/test_client_query_legacy_sql.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - import re from .. import client_query_legacy_sql diff --git a/bigquery/samples/tests/test_client_query_relax_column.py b/bigquery/samples/tests/test_client_query_relax_column.py index 4db50da07376..685db9cb1fa0 100644 --- a/bigquery/samples/tests/test_client_query_relax_column.py +++ b/bigquery/samples/tests/test_client_query_relax_column.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import client_query_relax_column diff --git a/bigquery/samples/tests/test_copy_table.py b/bigquery/samples/tests/test_copy_table.py index 6d7de2d9132c..0138cd8ee1e2 100644 --- a/bigquery/samples/tests/test_copy_table.py +++ b/bigquery/samples/tests/test_copy_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import copy_table diff --git a/bigquery/samples/tests/test_copy_table_cmek.py b/bigquery/samples/tests/test_copy_table_cmek.py index 6b2ab8f83638..25238071b947 100644 --- a/bigquery/samples/tests/test_copy_table_cmek.py +++ b/bigquery/samples/tests/test_copy_table_cmek.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import copy_table_cmek diff --git a/bigquery/samples/tests/test_copy_table_multiple_source.py b/bigquery/samples/tests/test_copy_table_multiple_source.py index 71c2bc1d8fd9..755fa2ccb5e9 100644 --- a/bigquery/samples/tests/test_copy_table_multiple_source.py +++ b/bigquery/samples/tests/test_copy_table_multiple_source.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- import six from google.cloud import bigquery diff --git a/bigquery/samples/tests/test_create_dataset.py b/bigquery/samples/tests/test_create_dataset.py index e52e9ddfdced..dfadc67d8468 100644 --- a/bigquery/samples/tests/test_create_dataset.py +++ b/bigquery/samples/tests/test_create_dataset.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import create_dataset diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py index 3cda34bf0848..bbf880cbe402 100644 --- a/bigquery/samples/tests/test_create_job.py +++ b/bigquery/samples/tests/test_create_job.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import create_job diff --git a/bigquery/samples/tests/test_create_table.py b/bigquery/samples/tests/test_create_table.py index f9ebc0e5d70d..093ee6e94277 100644 --- a/bigquery/samples/tests/test_create_table.py +++ b/bigquery/samples/tests/test_create_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import create_table diff --git a/bigquery/samples/tests/test_create_table_range_partitioned.py b/bigquery/samples/tests/test_create_table_range_partitioned.py index ca186f9a7554..ac312b033832 100644 --- a/bigquery/samples/tests/test_create_table_range_partitioned.py +++ b/bigquery/samples/tests/test_create_table_range_partitioned.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import create_table_range_partitioned diff --git a/bigquery/samples/tests/test_dataset_exists.py b/bigquery/samples/tests/test_dataset_exists.py index 203c4b884dc4..a44e60371120 100644 --- a/bigquery/samples/tests/test_dataset_exists.py +++ b/bigquery/samples/tests/test_dataset_exists.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import dataset_exists diff --git a/bigquery/samples/tests/test_dataset_label_samples.py b/bigquery/samples/tests/test_dataset_label_samples.py index 1e526f2339ac..94a2092407b0 100644 --- a/bigquery/samples/tests/test_dataset_label_samples.py +++ b/bigquery/samples/tests/test_dataset_label_samples.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import delete_dataset_labels from .. import get_dataset_labels from .. import label_dataset diff --git a/bigquery/samples/tests/test_delete_dataset.py b/bigquery/samples/tests/test_delete_dataset.py index 836b3aebb272..2b1b6ad06195 100644 --- a/bigquery/samples/tests/test_delete_dataset.py +++ b/bigquery/samples/tests/test_delete_dataset.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import delete_dataset diff --git a/bigquery/samples/tests/test_delete_table.py b/bigquery/samples/tests/test_delete_table.py index f76ad8624cc6..8f4796623a83 100644 --- a/bigquery/samples/tests/test_delete_table.py +++ b/bigquery/samples/tests/test_delete_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. 
import delete_table diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py index 8682be7ee3e9..dedec1d7b29e 100644 --- a/bigquery/samples/tests/test_get_dataset.py +++ b/bigquery/samples/tests/test_get_dataset.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import get_dataset diff --git a/bigquery/samples/tests/test_get_table.py b/bigquery/samples/tests/test_get_table.py index 8adaa6557954..b950d434aef6 100644 --- a/bigquery/samples/tests/test_get_table.py +++ b/bigquery/samples/tests/test_get_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import get_table diff --git a/bigquery/samples/tests/test_list_datasets.py b/bigquery/samples/tests/test_list_datasets.py index d8c32e91ee20..4c66a24f9b1a 100644 --- a/bigquery/samples/tests/test_list_datasets.py +++ b/bigquery/samples/tests/test_list_datasets.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import list_datasets diff --git a/bigquery/samples/tests/test_list_datasets_by_label.py b/bigquery/samples/tests/test_list_datasets_by_label.py index f414539b00b3..6d04a281ff42 100644 --- a/bigquery/samples/tests/test_list_datasets_by_label.py +++ b/bigquery/samples/tests/test_list_datasets_by_label.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import list_datasets_by_label diff --git a/bigquery/samples/tests/test_list_tables.py b/bigquery/samples/tests/test_list_tables.py index 61ac04ea26ce..ec1621ac7579 100644 --- a/bigquery/samples/tests/test_list_tables.py +++ b/bigquery/samples/tests/test_list_tables.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import list_tables diff --git a/bigquery/samples/tests/test_load_table_dataframe.py b/bigquery/samples/tests/test_load_table_dataframe.py index 2151704d3b25..3b7cb16ea692 100644 --- a/bigquery/samples/tests/test_load_table_dataframe.py +++ b/bigquery/samples/tests/test_load_table_dataframe.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - import pytest from .. import load_table_dataframe diff --git a/bigquery/samples/tests/test_model_samples.py b/bigquery/samples/tests/test_model_samples.py index 99d838533917..d7b06a92a3e1 100644 --- a/bigquery/samples/tests/test_model_samples.py +++ b/bigquery/samples/tests/test_model_samples.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import delete_model from .. import get_model from .. import list_models diff --git a/bigquery/samples/tests/test_query_external_sheets_permanent_table.py b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py index a7b5db09e5af..a00930cad881 100644 --- a/bigquery/samples/tests/test_query_external_sheets_permanent_table.py +++ b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. 
import query_external_sheets_permanent_table diff --git a/bigquery/samples/tests/test_query_external_sheets_temporary_table.py b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py index 4856b6a49d2b..8274787cb644 100644 --- a/bigquery/samples/tests/test_query_external_sheets_temporary_table.py +++ b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import query_external_sheets_temporary_table diff --git a/bigquery/samples/tests/test_query_to_arrow.py b/bigquery/samples/tests/test_query_to_arrow.py index 2fbed807ece4..dd9b3ab508cc 100644 --- a/bigquery/samples/tests/test_query_to_arrow.py +++ b/bigquery/samples/tests/test_query_to_arrow.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - import pyarrow from .. import query_to_arrow diff --git a/bigquery/samples/tests/test_routine_samples.py b/bigquery/samples/tests/test_routine_samples.py index a3e82abcc7ea..81d33a0cf5df 100644 --- a/bigquery/samples/tests/test_routine_samples.py +++ b/bigquery/samples/tests/test_routine_samples.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from google.cloud import bigquery_v2 diff --git a/bigquery/samples/tests/test_table_exists.py b/bigquery/samples/tests/test_table_exists.py index 232d77fbcb60..ae4fc65f847c 100644 --- a/bigquery/samples/tests/test_table_exists.py +++ b/bigquery/samples/tests/test_table_exists.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import table_exists diff --git a/bigquery/samples/tests/test_table_insert_rows.py b/bigquery/samples/tests/test_table_insert_rows.py index 95d119dbdc93..9c5fd5768cfb 100644 --- a/bigquery/samples/tests/test_table_insert_rows.py +++ b/bigquery/samples/tests/test_table_insert_rows.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import table_insert_rows diff --git a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py index 6a59609baacf..a2a4febd7f75 100644 --- a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py +++ b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from google.cloud import bigquery from .. import table_insert_rows_explicit_none_insert_ids as mut diff --git a/bigquery/samples/tests/test_update_dataset_access.py b/bigquery/samples/tests/test_update_dataset_access.py index 679b700731e3..ae33dbfe4a4c 100644 --- a/bigquery/samples/tests/test_update_dataset_access.py +++ b/bigquery/samples/tests/test_update_dataset_access.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. 
import update_dataset_access diff --git a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py index a97de11a2f1a..46e9654209ed 100644 --- a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py +++ b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import update_dataset_default_table_expiration diff --git a/bigquery/samples/tests/test_update_dataset_description.py b/bigquery/samples/tests/test_update_dataset_description.py index 63826077b976..c6f8889f50da 100644 --- a/bigquery/samples/tests/test_update_dataset_description.py +++ b/bigquery/samples/tests/test_update_dataset_description.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - from .. import update_dataset_description