Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor(bigquery): update code samples to use strings for table and dataset IDs #9495

Merged
merged 18 commits into from
Dec 2, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
399 changes: 0 additions & 399 deletions bigquery/docs/snippets.py

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions bigquery/docs/usage/encryption.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ Cloud KMS for the destination table.
Copy a table, using a customer-managed encryption key from Cloud KMS for the
destination table.

.. literalinclude:: ../snippets.py
.. literalinclude:: ../samples/copy_table_cmek.py
:language: python
:dedent: 4
:start-after: [START bigquery_copy_table_cmek]
Expand All @@ -45,7 +45,7 @@ destination table.
Write query results to a table, using a customer-managed encryption key from
Cloud KMS for the destination table.

.. literalinclude:: ../snippets.py
.. literalinclude:: ../samples/client_query_destination_table_cmek.py
:language: python
:dedent: 4
:start-after: [START bigquery_query_destination_table_cmek]
Expand Down
4 changes: 2 additions & 2 deletions bigquery/docs/usage/queries.rst
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ Run a query and wait for it to finish with the
Run a dry run query
^^^^^^^^^^^^^^^^^^^

.. literalinclude:: ../snippets.py
.. literalinclude:: ../samples/client_query_dry_run.py
:language: python
:dedent: 4
:start-after: [START bigquery_query_dry_run]
Expand All @@ -30,7 +30,7 @@ Writing query results to a destination table
See BigQuery documentation for more information on
`writing query results <https://cloud.google.com/bigquery/docs/writing-results>`_.

.. literalinclude:: ../snippets.py
.. literalinclude:: ../samples/client_query_destination_table.py
emar-kar marked this conversation as resolved.
Show resolved Hide resolved
:language: python
:dedent: 4
:start-after: [START bigquery_query_destination_table]
Expand Down
4 changes: 1 addition & 3 deletions bigquery/samples/client_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,7 @@ def client_query(client):
ORDER BY total_people DESC
LIMIT 20
"""
query_job = client.query(
query, location="US" # Must match the destination dataset(s) location.
) # Make an API request.
query_job = client.query(query) # Make an API request.

print("The query data:")
for row in query_job:
Expand Down
52 changes: 52 additions & 0 deletions bigquery/samples/client_query_add_column.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_add_column(client, table_id):

    # [START bigquery_add_column_query_append]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # Fetch the destination table so we can report its current column count.
    dest_table = client.get_table(table_id)  # Make an API request.
    print("Table {} contains {} columns".format(table_id, len(dest_table.schema)))

    # Append the query results to the destination table, and allow the
    # result schema to add new fields to the table's schema.
    append_config = bigquery.QueryJobConfig(
        destination=table_id,
        schema_update_options=[bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION],
        write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
    )

    # Start the query, passing in the extra configuration.
    job = client.query(
        # In this example, the existing table contains only the 'full_name' and
        # 'age' columns, while the results of this query will contain an
        # additional 'favorite_color' column.
        'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;',
        job_config=append_config,
    )  # Make an API request.
    job.result()  # Wait for the job to complete.

    # Re-fetch the table to observe the widened schema.
    dest_table = client.get_table(table_id)  # Make an API request.
    print("Table {} now contains {} columns".format(table_id, len(dest_table.schema)))
    # [END bigquery_add_column_query_append]
46 changes: 46 additions & 0 deletions bigquery/samples/client_query_batch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_batch(client):

    # [START bigquery_query_batch]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    batch_config = bigquery.QueryJobConfig(
        # Run at batch priority, which won't count toward concurrent rate limit.
        priority=bigquery.QueryPriority.BATCH
    )

    sql = """
        SELECT corpus
        FROM `bigquery-public-data.samples.shakespeare`
        GROUP BY corpus;
    """

    # Start the query, passing in the extra configuration.
    job = client.query(sql, job_config=batch_config)  # Make an API request.

    # Re-fetch the job to refresh its state; the results are ready once the
    # state reaches `DONE`.
    job = client.get_job(job.job_id, location=job.location)  # Make an API request.

    print("Job {} is currently in state {}".format(job.job_id, job.state))
    # [END bigquery_query_batch]
    return job
40 changes: 40 additions & 0 deletions bigquery/samples/client_query_destination_table.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_destination_table(client, table_id):

    # [START bigquery_query_destination_table]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # Direct the query results into the destination table.
    dest_config = bigquery.QueryJobConfig(destination=table_id)

    sql = """
        SELECT corpus
        FROM `bigquery-public-data.samples.shakespeare`
        GROUP BY corpus;
    """

    # Start the query, passing in the extra configuration.
    job = client.query(sql, job_config=dest_config)  # Make an API request.
    job.result()  # Wait for the job to complete.

    print("Query results loaded to the table {}".format(table_id))
    # [END bigquery_query_destination_table]
49 changes: 49 additions & 0 deletions bigquery/samples/client_query_destination_table_cmek.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_destination_table_cmek(client, table_id, kms_key_name):

    # [START bigquery_query_destination_table_cmek]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # Set the encryption key to use for the destination.
    # TODO(developer): Replace this key with a key you have created in KMS.
    # kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
    #     your-project, location, your-ring, your-key
    # )

    # Encrypt the destination table with the customer-managed key.
    encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name)
    cmek_config = bigquery.QueryJobConfig(
        destination=table_id,
        destination_encryption_configuration=encryption_config,
    )

    # Start the query, passing in the extra configuration.
    job = client.query("SELECT 17 AS my_col;", job_config=cmek_config)  # Make an API request.
    job.result()  # Wait for the job to complete.

    # Confirm the destination table was written with the expected key.
    dest_table = client.get_table(table_id)  # Make an API request.
    if dest_table.encryption_configuration.kms_key_name == kms_key_name:
        print("The destination table is written using the encryption configuration")
    # [END bigquery_query_destination_table_cmek]
44 changes: 44 additions & 0 deletions bigquery/samples/client_query_destination_table_legacy.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_destination_table_legacy(client, table_id):

    # [START bigquery_query_legacy_large_results]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # TODO(developer): Set table_id to the ID of the destination table.
    # table_id = "your-project.your_dataset.your_table_name"

    # Use legacy SQL syntax, and allow results larger than the normal limit by
    # writing them to the destination table.
    legacy_config = bigquery.QueryJobConfig()
    legacy_config.allow_large_results = True
    legacy_config.destination = table_id
    legacy_config.use_legacy_sql = True

    sql = """
        SELECT corpus
        FROM [bigquery-public-data:samples.shakespeare]
        GROUP BY corpus;
    """

    # Start the query, passing in the extra configuration.
    job = client.query(sql, job_config=legacy_config)  # Make an API request.
    job.result()  # Wait for the job to complete.

    print("Query results loaded to the table {}".format(table_id))
    # [END bigquery_query_legacy_large_results]
40 changes: 40 additions & 0 deletions bigquery/samples/client_query_dry_run.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_dry_run(client):

    # [START bigquery_query_dry_run]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    # A dry run validates the query and estimates the bytes it would process
    # without running it; disable the cache so the estimate is meaningful.
    dry_run_config = bigquery.QueryJobConfig(dry_run=True, use_query_cache=False)

    sql = (
        "SELECT name, COUNT(*) as name_count "
        "FROM `bigquery-public-data.usa_names.usa_1910_2013` "
        "WHERE state = 'WA' "
        "GROUP BY name"
    )

    # Start the query, passing in the extra configuration.
    job = client.query(sql, job_config=dry_run_config)  # Make an API request.

    # A dry run query completes immediately.
    print("This query will process {} bytes.".format(job.total_bytes_processed))
    # [END bigquery_query_dry_run]
    return job
39 changes: 39 additions & 0 deletions bigquery/samples/client_query_legacy_sql.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def client_query_legacy_sql(client):

    # [START bigquery_query_legacy]
    from google.cloud import bigquery

    # TODO(developer): Construct a BigQuery client object.
    # client = bigquery.Client()

    sql = (
        "SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] "
        'WHERE state = "TX" '
        "LIMIT 100"
    )

    # Set use_legacy_sql to True to use legacy SQL syntax.
    legacy_config = bigquery.QueryJobConfig(use_legacy_sql=True)

    # Start the query, passing in the extra configuration.
    rows = client.query(sql, job_config=legacy_config)  # Make an API request.

    print("The query data:")
    for row in rows:
        print(row)
    # [END bigquery_query_legacy]
Loading