From a38e13ae23816f9e33cffe8f50e0f9b6c18de2ab Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Oct 2016 11:03:17 -0700 Subject: [PATCH] Generate readmes for most service samples (#599) --- bigquery/api/README.md | 18 - bigquery/api/README.rst | 395 +++++++++++++++++++ bigquery/api/README.rst.in | 46 +++ bigquery/cloud-client/README.md | 5 - bigquery/cloud-client/README.rst | 332 ++++++++++++++++ bigquery/cloud-client/README.rst.in | 43 ++ bigquery/cloud-client/async_query.py | 2 +- bigquery/cloud-client/sync_query.py | 2 +- bigquery/dml/README.md | 15 - bigquery/dml/README.rst | 150 +++++++ bigquery/dml/README.rst.in | 29 ++ bigquery/dml/insert_sql.py | 2 +- bigquery/dml/populate_db.py | 2 +- bigtable/hello/README.md | 107 ----- bigtable/hello/README.rst | 119 ++++++ bigtable/hello/README.rst.in | 21 + bigtable/hello_happybase/README.md | 109 ----- bigtable/hello_happybase/README.rst | 126 ++++++ bigtable/hello_happybase/README.rst.in | 30 ++ datastore/README.md | 8 - datastore/api/README.md | 4 - datastore/api/README.rst | 136 +++++++ datastore/api/README.rst.in | 24 ++ dns/README.md | 26 -- dns/api/README.md | 11 - dns/api/README.rst | 102 +++++ dns/api/README.rst.in | 22 ++ logging/api-client/README.rst | 87 ++++ logging/api-client/README.rst.in | 18 + logging/cloud-client/README.md | 36 -- logging/cloud-client/README.rst | 163 ++++++++ logging/cloud-client/README.rst.in | 26 ++ monitoring/README.md | 33 -- monitoring/api/v2/README.md | 11 - monitoring/api/v2/README.rst | 186 +++++++++ monitoring/api/v2/README.rst.in | 28 ++ monitoring/api/v3/README.md | 71 ---- monitoring/api/v3/README.rst | 136 +++++++ monitoring/api/v3/README.rst.in | 25 ++ pubsub/cloud-client/README.md | 17 - pubsub/cloud-client/README.rst | 203 ++++++++++ pubsub/cloud-client/README.rst.in | 28 ++ scripts/readme-gen/templates/README.tmpl.rst | 17 +- scripts/readme-gen/templates/auth.tmpl.rst | 4 +- storage/README.md | 31 -- storage/api/README.rst | 8 +- storage/cloud-client/README.rst | 35 +- storage/cloud-client/README.rst.in | 2 + 48 files changed, 2525 insertions(+), 526 deletions(-) delete mode 100644 bigquery/api/README.md create mode 100644 bigquery/api/README.rst create mode 100644 bigquery/api/README.rst.in delete mode 100644 bigquery/cloud-client/README.md create mode 100644 bigquery/cloud-client/README.rst create mode 100644 bigquery/cloud-client/README.rst.in delete mode 100644 bigquery/dml/README.md create mode 100644 bigquery/dml/README.rst create mode 100644 bigquery/dml/README.rst.in delete mode 100644 bigtable/hello/README.md create mode 100644 bigtable/hello/README.rst create mode 100644 bigtable/hello/README.rst.in delete mode 100644 bigtable/hello_happybase/README.md create mode 100644 bigtable/hello_happybase/README.rst create mode 100644 bigtable/hello_happybase/README.rst.in delete mode 100644 datastore/README.md delete mode 100644 datastore/api/README.md create mode 100644 datastore/api/README.rst create mode 100644 datastore/api/README.rst.in delete mode 100644 dns/README.md delete mode 100644 dns/api/README.md create mode 100644 dns/api/README.rst create mode 100644 dns/api/README.rst.in create mode 100644 logging/api-client/README.rst create mode 100644 logging/api-client/README.rst.in delete mode 100644 logging/cloud-client/README.md create mode 100644 logging/cloud-client/README.rst create mode 100644 logging/cloud-client/README.rst.in delete mode 100644 monitoring/README.md delete mode 100644 monitoring/api/v2/README.md create mode 100644 monitoring/api/v2/README.rst create 
mode 100644 monitoring/api/v2/README.rst.in delete mode 100644 monitoring/api/v3/README.md create mode 100644 monitoring/api/v3/README.rst create mode 100644 monitoring/api/v3/README.rst.in delete mode 100644 pubsub/cloud-client/README.md create mode 100644 pubsub/cloud-client/README.rst create mode 100644 pubsub/cloud-client/README.rst.in delete mode 100644 storage/README.md diff --git a/bigquery/api/README.md b/bigquery/api/README.md deleted file mode 100644 index 7676c9d27e43..000000000000 --- a/bigquery/api/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# BigQuery API Samples - - -These samples are used on the following documentation pages: - -> -* https://cloud.google.com/bigquery/docs/loading-data-cloud-storage -* https://cloud.google.com/bigquery/create-simple-app-api -* https://cloud.google.com/bigquery/loading-data-post-request -* https://cloud.google.com/bigquery/authentication -* https://cloud.google.com/bigquery/exporting-data-from-bigquery -* https://cloud.google.com/bigquery/bigquery-api-quickstart -* https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects -* https://cloud.google.com/bigquery/streaming-data-into-bigquery -* https://cloud.google.com/bigquery/docs/data -* https://cloud.google.com/bigquery/querying-data - - diff --git a/bigquery/api/README.rst b/bigquery/api/README.rst new file mode 100644 index 000000000000..331c108d1ddc --- /dev/null +++ b/bigquery/api/README.rst @@ -0,0 +1,395 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google BigQuery Python Samples +=============================================================================== + +This directory contains samples for Google BigQuery. `Google BigQuery`_ is Google's fully managed, petabyte scale, low cost analytics data warehouse. BigQuery is NoOps—there is no infrastructure to manage and you don't need a database administrator—so you can focus on analyzing data to find meaningful insights, use familiar SQL, and take advantage of our pay-as-you-go model. + + + + +.. _Google BigQuery: https://cloud.google.com/bigquery/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. 
Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Getting started ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python getting_started.py + + usage: getting_started.py [-h] project_id + + Command-line application that demonstrates basic BigQuery API usage. + + This sample queries a public shakespeare dataset and displays the 10 of + Shakespeare's works with the greatest number of distinct words. + + This sample is used on this page: + + https://cloud.google.com/bigquery/bigquery-api-quickstart + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud Project ID. + + optional arguments: + -h, --help show this help message and exit + + +Sync query ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python sync_query.py + + usage: sync_query.py [-h] [-t TIMEOUT] [-r NUM_RETRIES] [-l USE_LEGACY_SQL] + project_id query + + Command-line application to perform a synchronous query in BigQuery. + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud project ID. + query BigQuery SQL Query. + + optional arguments: + -h, --help show this help message and exit + -t TIMEOUT, --timeout TIMEOUT + Number of seconds to wait for a result + -r NUM_RETRIES, --num_retries NUM_RETRIES + Number of times to retry in case of 500 error. + -l USE_LEGACY_SQL, --use_legacy_sql USE_LEGACY_SQL + Use legacy BigQuery SQL syntax instead of standard SQL + syntax. + + +Async query ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python async_query.py + + usage: async_query.py [-h] [-b] [-r NUM_RETRIES] [-p POLL_INTERVAL] + [-l USE_LEGACY_SQL] + project_id query + + Command-line application to perform an asynchronous query in BigQuery. + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud project ID. + query BigQuery SQL Query. + + optional arguments: + -h, --help show this help message and exit + -b, --batch Run query in batch mode. + -r NUM_RETRIES, --num_retries NUM_RETRIES + Number of times to retry in case of 500 error. + -p POLL_INTERVAL, --poll_interval POLL_INTERVAL + How often to poll the query for completion (seconds). + -l USE_LEGACY_SQL, --use_legacy_sql USE_LEGACY_SQL + Use legacy BigQuery SQL syntax instead of standard SQL + syntax. + + +Listing datasets and projects ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python list_datasets_projects.py + + usage: list_datasets_projects.py [-h] project_id + + Command-line application to list all projects and datasets in BigQuery.
+ + This sample is used on this page: + + https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id the project id to list. + + optional arguments: + -h, --help show this help message and exit + + +Load data by POST ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python load_data_by_post.py + + usage: load_data_by_post.py [-h] + project_id dataset_id table_name schema_file + data_file + + Command-line application that loads data into BigQuery via HTTP POST. + + This sample is used on this page: + + https://cloud.google.com/bigquery/loading-data-into-bigquery + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud project ID. + dataset_id A BigQuery dataset ID. + table_name Name of the table to load data into. + schema_file Path to a schema file describing the table schema. + data_file Path to the data file. + + optional arguments: + -h, --help show this help message and exit + + +Load data from CSV ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python load_data_from_csv.py + + usage: load_data_from_csv.py [-h] [-p POLL_INTERVAL] [-r NUM_RETRIES] + project_id dataset_id table_name schema_file + data_path + + Command-line application that loads data into BigQuery from a CSV file in + Google Cloud Storage. + + This sample is used on this page: + + https://cloud.google.com/bigquery/loading-data-into-bigquery#loaddatagcs + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud project ID. + dataset_id A BigQuery dataset ID. + table_name Name of the table to load data into. + schema_file Path to a schema file describing the table schema. + data_path Google Cloud Storage path to the CSV data, for + example: gs://mybucket/in.csv + + optional arguments: + -h, --help show this help message and exit + -p POLL_INTERVAL, --poll_interval POLL_INTERVAL + How often to poll the query for completion (seconds). + -r NUM_RETRIES, --num_retries NUM_RETRIES + Number of times to retry in case of 500 error. + + +Load streaming data ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python streaming.py + + usage: streaming.py [-h] [-p POLL_INTERVAL] [-r NUM_RETRIES] + project_id dataset_id table_name + + Command-line application that streams data into BigQuery. + + This sample is used on this page: + + https://cloud.google.com/bigquery/streaming-data-into-bigquery + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud project ID. + dataset_id A BigQuery dataset ID. + table_name Name of the table to load data into. + + optional arguments: + -h, --help show this help message and exit + -p POLL_INTERVAL, --poll_interval POLL_INTERVAL + How often to poll the query for completion (seconds). + -r NUM_RETRIES, --num_retries NUM_RETRIES + Number of times to retry in case of 500 error. + + +Export data to Cloud Storage ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. 
code-block:: bash + + $ python export_data_to_cloud_storage.py + + usage: export_data_to_cloud_storage.py [-h] [-p POLL_INTERVAL] + [-r NUM_RETRIES] [-z] + [-f {CSV,NEWLINE_DELIMITED_JSON,AVRO}] + project_id dataset_id table_id gcs_path + + Command-line application to export a table from BigQuery to Google Cloud + Storage. + + This sample is used on this page: + + https://cloud.google.com/bigquery/exporting-data-from-bigquery + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud project ID. + dataset_id BigQuery dataset to export. + table_id BigQuery table to export. + gcs_path Google Cloud Storage path to store the exported data. + For example, gs://mybucket/mydata.csv + + optional arguments: + -h, --help show this help message and exit + -p POLL_INTERVAL, --poll_interval POLL_INTERVAL + How often to poll the query for completion (seconds). + -r NUM_RETRIES, --num_retries NUM_RETRIES + Number of times to retry in case of 500 error. + -z, --gzip compress resultset with gzip + -f {CSV,NEWLINE_DELIMITED_JSON,AVRO}, --format {CSV,NEWLINE_DELIMITED_JSON,AVRO} + output file format + + +User auth with an installed app ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python installed_app.py + + usage: installed_app.py [-h] [--auth_host_name AUTH_HOST_NAME] + [--noauth_local_webserver] + [--auth_host_port [AUTH_HOST_PORT [AUTH_HOST_PORT ...]]] + [--logging_level {DEBUG,INFO,WARNING,ERROR,CRITICAL}] + project_id + + Command-line application that demonstrates using BigQuery with credentials + obtained from an installed app. + + This sample is used on this page: + + https://cloud.google.com/bigquery/authentication + + For more information, see the README.md under /bigquery. + + positional arguments: + project_id Your Google Cloud Project ID. + + optional arguments: + -h, --help show this help message and exit + --auth_host_name AUTH_HOST_NAME + Hostname when running a local web server. + --noauth_local_webserver + Do not run a local web server. + --auth_host_port [AUTH_HOST_PORT [AUTH_HOST_PORT ...]] + Port web server should listen on. + --logging_level {DEBUG,INFO,WARNING,ERROR,CRITICAL} + Set the logging level of detail. + + + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/bigquery/api/README.rst.in b/bigquery/api/README.rst.in new file mode 100644 index 000000000000..70ff6ff0e147 --- /dev/null +++ b/bigquery/api/README.rst.in @@ -0,0 +1,46 @@ +# This file is used to generate README.rst + +product: + name: Google BigQuery + short_name: BigQuery + url: https://cloud.google.com/bigquery/docs + description: > + `Google BigQuery`_ is Google's fully managed, petabyte scale, low cost + analytics data warehouse. BigQuery is NoOps—there is no infrastructure to + manage and you don't need a database administrator—so you can focus on + analyzing data to find meaningful insights, use familiar SQL, and take + advantage of our pay-as-you-go model. 
+ +setup: +- auth +- install_deps + +samples: +- name: Getting started + file: getting_started.py + show_help: true +- name: Sync query + file: sync_query.py + show_help: true +- name: Async query + file: async_query.py + show_help: true +- name: Listing datasets and projects + file: list_datasets_projects.py + show_help: true +- name: Load data by POST + file: load_data_by_post.py + show_help: true +- name: Load data from CSV + file: load_data_from_csv.py + show_help: true +- name: Load streaming data + file: streaming.py + show_help: true +- name: Export data to Cloud Storage + file: export_data_to_cloud_storage.py + show_help: true +- name: User auth with an installed app + file: installed_app.py + show_help: true + diff --git a/bigquery/cloud-client/README.md b/bigquery/cloud-client/README.md deleted file mode 100644 index 9c1f9b9fbe5a..000000000000 --- a/bigquery/cloud-client/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# BigQuery Google Cloud Client Library Samples - - - - diff --git a/bigquery/cloud-client/README.rst b/bigquery/cloud-client/README.rst new file mode 100644 index 000000000000..3fbb1716f3f1 --- /dev/null +++ b/bigquery/cloud-client/README.rst @@ -0,0 +1,332 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google BigQuery Python Samples +=============================================================================== + +This directory contains samples for Google BigQuery. `Google BigQuery`_ is Google's fully managed, petabyte scale, low cost analytics data warehouse. BigQuery is NoOps—there is no infrastructure to manage and you don't need a database administrator—so you can focus on analyzing data to find meaningful insights, use familiar SQL, and take advantage of our pay-as-you-go model. + + + + +.. _Google BigQuery: https://cloud.google.com/bigquery/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. 
code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + +Sync query ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python sync_query.py + + usage: sync_query.py [-h] query + + Command-line application to perform synchronous queries in BigQuery. + + For more information, see the README.md under /bigquery. + + Example invocation: + $ python sync_query.py \ + 'SELECT corpus FROM `publicdata.samples.shakespeare` GROUP BY corpus' + + positional arguments: + query BigQuery SQL Query. + + optional arguments: + -h, --help show this help message and exit + + +Async query ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python async_query.py + + usage: async_query.py [-h] query + + Command-line application to perform asynchronous queries in BigQuery. + + For more information, see the README.md under /bigquery. + + Example invocation: + $ python async_query.py 'SELECT corpus FROM `publicdata.samples.shakespeare` GROUP BY corpus' + + positional arguments: + query BigQuery SQL Query. + + optional arguments: + -h, --help show this help message and exit + + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] [--project PROJECT] + {list-datasets,list-tables,create-table,list-rows,copy-table,delete-table} + ... + + Samples that demonstrate basic operations in the BigQuery API. + + For more information, see the README.md under /bigquery. + + Example invocation: + $ python snippets.py list-datasets + + The dataset and table should already exist. + + positional arguments: + {list-datasets,list-tables,create-table,list-rows,copy-table,delete-table} + list-datasets Lists all datasets in a given project. If no project + is specified, then the currently active project is + used + list-tables Lists all of the tables in a given dataset. If no + project is specified, then the currently active + project is used. + create-table Creates a simple table in the given dataset. If no + project is specified, then the currently active + project is used. + list-rows Prints rows in the given table. Will print 25 rows at + most for brevity as tables can contain large amounts + of rows. If no project is specified, then the + currently active project is used. + copy-table Copies a table. If no project is specified, then the + currently active project is used. + delete-table Deletes a table in a given dataset. If no project is + specified, then the currently active project is used. + + optional arguments: + -h, --help show this help message and exit + --project PROJECT + + +Load data from a file ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python load_data_from_file.py + + usage: load_data_from_file.py [-h] dataset_name table_name source_file_name + + Loads data into BigQuery from a local file. + + For more information, see the README.md under /bigquery. 
+ + Example invocation: + $ python load_data_from_file.py example_dataset example_table example-data.csv + + The dataset and table should already exist. + + positional arguments: + dataset_name + table_name + source_file_name Path to a .csv file to upload. + + optional arguments: + -h, --help show this help message and exit + + +Load data from Cloud Storage ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python load_data_from_gcs.py + + usage: load_data_from_gcs.py [-h] dataset_name table_name source + + Loads data into BigQuery from an object in Google Cloud Storage. + + For more information, see the README.md under /bigquery. + + Example invocation: + $ python load_data_from_gcs.py example_dataset example_table gs://example-bucket/example-data.csv + + The dataset and table should already exist. + + positional arguments: + dataset_name + table_name + source The Google Cloud Storage object to load. Must be in the format + gs://bucket_name/object_name + + optional arguments: + -h, --help show this help message and exit + + +Load streaming data ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python stream_data.py + + usage: stream_data.py [-h] dataset_name table_name json_data + + Loads a single row of data directly into BigQuery. + + For more information, see the README.md under /bigquery. + + Example invocation: + $ python stream_data.py example_dataset example_table '["Gandalf", 2000]' + + The dataset and table should already exist. + + positional arguments: + dataset_name + table_name + json_data The row to load into BigQuery as an array in JSON format. + + optional arguments: + -h, --help show this help message and exit + + +Export data to Cloud Storage ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python export_data_to_gcs.py + + usage: export_data_to_gcs.py [-h] dataset_name table_name destination + + Exports data from BigQuery to an object in Google Cloud Storage. + + For more information, see the README.md under /bigquery. + + Example invocation: + $ python export_data_to_gcs.py example_dataset example_table gs://example-bucket/example-data.csv + + The dataset and table should already exist. + + positional arguments: + dataset_name + table_name + destination The destination Google Cloud Storage object. Must be in the + format gs://bucket_name/object_name + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/bigquery/cloud-client/README.rst.in b/bigquery/cloud-client/README.rst.in new file mode 100644 index 000000000000..49143f062e6c --- /dev/null +++ b/bigquery/cloud-client/README.rst.in @@ -0,0 +1,43 @@ +# This file is used to generate README.rst + +product: + name: Google BigQuery + short_name: BigQuery + url: https://cloud.google.com/bigquery/docs + description: > + `Google BigQuery`_ is Google's fully managed, petabyte scale, low cost + analytics data warehouse. BigQuery is NoOps—there is no infrastructure to + manage and you don't need a database administrator—so you can focus on + analyzing data to find meaningful insights, use familiar SQL, and take + advantage of our pay-as-you-go model. + +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Sync query + file: sync_query.py + show_help: true +- name: Async query + file: async_query.py + show_help: true +- name: Snippets + file: snippets.py + show_help: true +- name: Load data from a file + file: load_data_from_file.py + show_help: true +- name: Load data from Cloud Storage + file: load_data_from_gcs.py + show_help: true +- name: Load streaming data + file: stream_data.py + show_help: true +- name: Export data to Cloud Storage + file: export_data_to_gcs.py + show_help: true + +cloud_client_library: true diff --git a/bigquery/cloud-client/async_query.py b/bigquery/cloud-client/async_query.py index 2531c61528df..aa3397351175 100755 --- a/bigquery/cloud-client/async_query.py +++ b/bigquery/cloud-client/async_query.py @@ -19,7 +19,7 @@ For more information, see the README.md under /bigquery. Example invocation: - $ python async_query.py \ + $ python async_query.py \\ 'SELECT corpus FROM `publicdata.samples.shakespeare` GROUP BY corpus' """ diff --git a/bigquery/cloud-client/sync_query.py b/bigquery/cloud-client/sync_query.py index f21270ed0706..37c8fea8a653 100755 --- a/bigquery/cloud-client/sync_query.py +++ b/bigquery/cloud-client/sync_query.py @@ -19,7 +19,7 @@ For more information, see the README.md under /bigquery. Example invocation: - $ python sync_query.py \ + $ python sync_query.py \\ 'SELECT corpus FROM `publicdata.samples.shakespeare` GROUP BY corpus' """ diff --git a/bigquery/dml/README.md b/bigquery/dml/README.md deleted file mode 100644 index 39b167df93ef..000000000000 --- a/bigquery/dml/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# BigQuery DML Samples - - -These samples are used on the following documentation page: - -> https://cloud.google.combigquery/docs/loading-data-sql-dml - - - -To create a test database, run the `populate_db.py` script. - -``` -python populate_db.py 100 localhost root 'mysql-password' sample_db -``` - diff --git a/bigquery/dml/README.rst b/bigquery/dml/README.rst new file mode 100644 index 000000000000..3bc4c4c8d6b3 --- /dev/null +++ b/bigquery/dml/README.rst @@ -0,0 +1,150 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google BigQuery Python Samples +=============================================================================== + +This directory contains samples for Google BigQuery. `Google BigQuery`_ is Google's fully managed, petabyte scale, low cost analytics data warehouse. BigQuery is NoOps—there is no infrastructure to manage and you don't need a database administrator—so you can focus on analyzing data to find meaningful insights, use familiar SQL, and take advantage of our pay-as-you-go model. 
+ + +This sample shows how to use Data Manipulation Language with BigQuery. + + +.. _Google BigQuery: https://cloud.google.com/bigquery/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Populate sample DB ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python populate_db.py + + usage: populate_db.py [-h] total_users host user password db + + Command-line tool to simulate user actions and write to SQL database. + + positional arguments: + total_users How many simulated users to create. + host Host of the database to write to. + user User to connect to the database. + password Password for the database user. + db Name of the database to write to. + + optional arguments: + -h, --help show this help message and exit + + +Insert SQL ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python insert_sql.py + + usage: insert_sql.py [-h] project default_dataset sql_path + + Sample that runs a file containing INSERT SQL statements in Big Query. + + This could be used to run the INSERT statements in a mysqldump output such as + + mysqldump --user=root --password='secret-password' --host=127.0.0.1 --no-create-info sample_db --skip-add-locks > sample_db_export.sql + + To run, first create tables with the same names and columns as the sample + database. Then run this script. 
+ + python insert_sql.py my-project my_dataset sample_db_export.sql + + positional arguments: + project Google Cloud project name + default_dataset Default BigQuery dataset name + sql_path Path to SQL file + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/bigquery/dml/README.rst.in b/bigquery/dml/README.rst.in new file mode 100644 index 000000000000..92fd9cd6df42 --- /dev/null +++ b/bigquery/dml/README.rst.in @@ -0,0 +1,29 @@ +# This file is used to generate README.rst + +product: + name: Google BigQuery + short_name: BigQuery + url: https://cloud.google.com/bigquery/docs + description: > + `Google BigQuery`_ is Google's fully managed, petabyte scale, low cost + analytics data warehouse. BigQuery is NoOps—there is no infrastructure to + manage and you don't need a database administrator—so you can focus on + analyzing data to find meaningful insights, use familiar SQL, and take + advantage of our pay-as-you-go model. + +description: | + This sample shows how to use Data Manipulation Language with BigQuery. + +setup: +- auth +- install_deps + +samples: +- name: Populate sample DB + file: populate_db.py + show_help: true +- name: Insert SQL + file: insert_sql.py + show_help: true + +cloud_client_library: true diff --git a/bigquery/dml/insert_sql.py b/bigquery/dml/insert_sql.py index e006da4aa4ba..2798be662482 100644 --- a/bigquery/dml/insert_sql.py +++ b/bigquery/dml/insert_sql.py @@ -69,7 +69,7 @@ def insert_sql(project, default_dataset, sql_path): formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('project', help='Google Cloud project name') parser.add_argument( - 'default_dataset', help='Default BigQuery dataset name') + 'default_dataset', help='Default BigQuery dataset name') parser.add_argument('sql_path', help='Path to SQL file') args = parser.parse_args() diff --git a/bigquery/dml/populate_db.py b/bigquery/dml/populate_db.py index ad05a88a732b..8b63d7897ed1 100755 --- a/bigquery/dml/populate_db.py +++ b/bigquery/dml/populate_db.py @@ -55,7 +55,7 @@ class UserSession(Base): id = sqlalchemy.Column(sqlalchemy.String(length=36), primary_key=True) user_id = sqlalchemy.Column( - sqlalchemy.Integer, sqlalchemy.ForeignKey('Users.id')) + sqlalchemy.Integer, sqlalchemy.ForeignKey('Users.id')) login_time = sqlalchemy.Column(sqlalchemy.DateTime) logout_time = sqlalchemy.Column(sqlalchemy.DateTime) ip_address = sqlalchemy.Column(sqlalchemy.String(length=40)) diff --git a/bigtable/hello/README.md b/bigtable/hello/README.md deleted file mode 100644 index 5057e2571b72..000000000000 --- a/bigtable/hello/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# Cloud Bigtable Hello World - -This is a simple application that demonstrates using the [Google Cloud Client -Library][gcloud-python-bigtable] to connect to and interact with Cloud Bigtable. 
- - -These samples are used on the following documentation page: - -> https://cloud.google.com/bigtable/docs/samples-python-hello - - - -[gcloud-python-bigtable]: https://googlecloudplatform.github.io/gcloud-python/stable/bigtable-usage.html -[sample-docs]: https://cloud.google.com/bigtable/docs/samples-python-hello - - - - -**Table of Contents** - -- [Downloading the sample](#downloading-the-sample) -- [Costs](#costs) -- [Provisioning an instance](#provisioning-an-instance) -- [Running the application](#running-the-application) -- [Cleaning up](#cleaning-up) - - - - -## Downloading the sample - -Download the sample app and navigate into the app directory: - -1. Clone the [Python samples - repository](https://github.com/GoogleCloudPlatform/python-docs-samples), to - your local machine: - - git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - - Alternatively, you can [download the - sample](https://github.com/GoogleCloudPlatform/python-docs-samples/archive/master.zip) - as a zip file and extract it. - -2. Change to the sample directory. - - cd python-docs-samples/bigtable/hello - - -## Costs - -This sample uses billable components of Cloud Platform, including: - -+ Google Cloud Bigtable - -Use the [Pricing Calculator][bigtable-pricing] to generate a cost estimate -based on your projected usage. New Cloud Platform users might be eligible for -a [free trial][free-trial]. - -[bigtable-pricing]: https://cloud.google.com/products/calculator/#id=1eb47664-13a2-4be1-9d16-6722902a7572 -[free-trial]: https://cloud.google.com/free-trial - - -## Provisioning an instance - -Follow the instructions in the [user -documentation](https://cloud.google.com/bigtable/docs/creating-instance) to -create a Google Cloud Platform project and Cloud Bigtable instance if necessary. -You'll need to reference your project id and instance id to run the -application. - - -## Running the application - -First, set your [Google Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials) - -Install the dependencies with pip. - -``` -$ pip install -r requirements.txt -``` - -Run the application. Replace the command-line parameters with values for your instance. - -``` -$ python main.py my-project my-instance -``` - -You will see output resembling the following: - -``` -Create table Hello-Bigtable -Write some greetings to the table -Scan for all greetings: - greeting0: Hello World! - greeting1: Hello Cloud Bigtable! - greeting2: Hello HappyBase! -Delete table Hello-Bigtable -``` - - -## Cleaning up - -To avoid incurring extra charges to your Google Cloud Platform account, remove -the resources created for this sample. - -- [Delete the Cloud Bigtable - instance](https://cloud.google.com/bigtable/docs/deleting-instance). diff --git a/bigtable/hello/README.rst b/bigtable/hello/README.rst new file mode 100644 index 000000000000..a1a327f45594 --- /dev/null +++ b/bigtable/hello/README.rst @@ -0,0 +1,119 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Bigtable Python Samples +=============================================================================== + +This directory contains samples for Google Cloud Bigtable. `Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's the same database that powers many core Google services, including Search, Analytics, Maps, and Gmail. + + + + +.. 
_Google Cloud Bigtable: https://cloud.google.com/bigtable/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Basic example ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python main.py + + usage: main.py [-h] [--table TABLE] project_id instance_id + + Demonstrates how to connect to Cloud Bigtable and run some basic operations. + Prerequisites: - Create a Cloud Bigtable cluster. + https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google + Application Default Credentials. + https://developers.google.com/identity/protocols/application-default- + credentials + + positional arguments: + project_id Your Cloud Platform project ID. + instance_id ID of the Cloud Bigtable instance to connect to. + + optional arguments: + -h, --help show this help message and exit + --table TABLE Table to create and destroy. (default: Hello-Bigtable) + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/bigtable/hello/README.rst.in b/bigtable/hello/README.rst.in new file mode 100644 index 000000000000..7551bc0de7b8 --- /dev/null +++ b/bigtable/hello/README.rst.in @@ -0,0 +1,21 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Bigtable + short_name: Cloud Bigtable + url: https://cloud.google.com/bigtable/docs + description: > + `Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's + the same database that powers many core Google services, including Search, + Analytics, Maps, and Gmail. + +setup: +- auth +- install_deps + +samples: +- name: Basic example + file: main.py + show_help: true + +cloud_client_library: true diff --git a/bigtable/hello_happybase/README.md b/bigtable/hello_happybase/README.md deleted file mode 100644 index d904eb969047..000000000000 --- a/bigtable/hello_happybase/README.md +++ /dev/null @@ -1,109 +0,0 @@ -# Cloud Bigtable Hello World via the HappyBase API - -This is a simple application that demonstrates using the [Google Cloud Client -Library HappyBase package][gcloud-python-happybase], an implementation of the [HappyBase -API][happybase] to connect to and interact with Cloud Bigtable. - - -These samples are used on the following documentation page: - -> https://cloud.google.com/bigtable/docs/samples-python-hello-happybase - - - -[gcloud-python-happybase]: https://github.com/GoogleCloudPlatform/google-cloud-python-happybase -[happybase]: http://happybase.readthedocs.io/en/stable/ -[sample-docs]: https://cloud.google.com/bigtable/docs/samples-python-hello-happybase - - - - -**Table of Contents** - -- [Downloading the sample](#downloading-the-sample) -- [Costs](#costs) -- [Provisioning an instance](#provisioning-an-instance) -- [Running the application](#running-the-application) -- [Cleaning up](#cleaning-up) - - - - -## Downloading the sample - -Download the sample app and navigate into the app directory: - -1. Clone the [Python samples - repository](https://github.com/GoogleCloudPlatform/python-docs-samples), to - your local machine: - - git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - - Alternatively, you can [download the - sample](https://github.com/GoogleCloudPlatform/python-docs-samples/archive/master.zip) - as a zip file and extract it. - -2. Change to the sample directory. - - cd python-docs-samples/bigtable/hello_happybase - - -## Costs - -This sample uses billable components of Cloud Platform, including: - -+ Google Cloud Bigtable - -Use the [Pricing Calculator][bigtable-pricing] to generate a cost estimate -based on your projected usage. New Cloud Platform users might be eligible for -a [free trial][free-trial]. - -[bigtable-pricing]: https://cloud.google.com/products/calculator/#id=1eb47664-13a2-4be1-9d16-6722902a7572 -[free-trial]: https://cloud.google.com/free-trial - - -## Provisioning an instance - -Follow the instructions in the [user -documentation](https://cloud.google.com/bigtable/docs/creating-instance) to -create a Google Cloud Platform project and Cloud Bigtable instance if necessary. -You'll need to reference your project id and instance id to run the -application. - - -## Running the application - -First, set your [Google Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials) - -Install the dependencies with pip. - -``` -$ pip install -r requirements.txt -``` - -Run the application. 
Replace the command-line parameters with values for your instance. - -``` -$ python main.py my-project my-instance -``` - -You will see output resembling the following: - -``` -Create table Hello-Bigtable -Write some greetings to the table -Scan for all greetings: - greeting0: Hello World! - greeting1: Hello Cloud Bigtable! - greeting2: Hello HappyBase! -Delete table Hello-Bigtable -``` - - -## Cleaning up - -To avoid incurring extra charges to your Google Cloud Platform account, remove -the resources created for this sample. - -- [Delete the Cloud Bigtable - instance](https://cloud.google.com/bigtable/docs/deleting-instance). diff --git a/bigtable/hello_happybase/README.rst b/bigtable/hello_happybase/README.rst new file mode 100644 index 000000000000..56dbbf485309 --- /dev/null +++ b/bigtable/hello_happybase/README.rst @@ -0,0 +1,126 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Bigtable Python Samples +=============================================================================== + +This directory contains samples for Google Cloud Bigtable. `Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's the same database that powers many core Google services, including Search, Analytics, Maps, and Gmail. + + +This sample demonstrates using the `Google Cloud Client Library HappyBase +package`_, an implementation of the `HappyBase API`_ to connect to and +interact with Cloud Bigtable. + +.. _Google Cloud Client Library HappyBase package: + https://github.com/GoogleCloudPlatform/google-cloud-python-happybase +.. _HappyBase API: http://happybase.readthedocs.io/en/stable/ + + +.. _Google Cloud Bigtable: https://cloud.google.com/bigtable/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. 
_virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Basic example ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python main.py + + usage: main.py [-h] [--table TABLE] project_id instance_id + + Demonstrates how to connect to Cloud Bigtable and run some basic operations. + Prerequisites: - Create a Cloud Bigtable cluster. + https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google + Application Default Credentials. + https://developers.google.com/identity/protocols/application-default- + credentials + + positional arguments: + project_id Your Cloud Platform project ID. + instance_id ID of the Cloud Bigtable instance to connect to. + + optional arguments: + -h, --help show this help message and exit + --table TABLE Table to create and destroy. (default: Hello-Bigtable) + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/bigtable/hello_happybase/README.rst.in b/bigtable/hello_happybase/README.rst.in new file mode 100644 index 000000000000..4bb363a36cfe --- /dev/null +++ b/bigtable/hello_happybase/README.rst.in @@ -0,0 +1,30 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Bigtable + short_name: Cloud Bigtable + url: https://cloud.google.com/bigtable/docs + description: > + `Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's + the same database that powers many core Google services, including Search, + Analytics, Maps, and Gmail. + +description: | + This sample demonstrates using the `Google Cloud Client Library HappyBase + package`_, an implementation of the `HappyBase API`_ to connect to and + interact with Cloud Bigtable. + + .. _Google Cloud Client Library HappyBase package: + https://github.com/GoogleCloudPlatform/google-cloud-python-happybase + .. _HappyBase API: http://happybase.readthedocs.io/en/stable/ + +setup: +- auth +- install_deps + +samples: +- name: Basic example + file: main.py + show_help: true + +cloud_client_library: true diff --git a/datastore/README.md b/datastore/README.md deleted file mode 100644 index ebea408ecfa1..000000000000 --- a/datastore/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Google Cloud Datastore Samples - -This section contains samples for [Google Cloud Datastore](https://cloud.google.com/datastore). - -## Other Samples - -* [Google App Engine & NDB](../appengine/ndb). -* [Blog Sample: Introduction to Data Models in Cloud Datastore](../blog/introduction_to_data_models_in_cloud_datastore). 
diff --git a/datastore/api/README.md b/datastore/api/README.md deleted file mode 100644 index a53482eda9d6..000000000000 --- a/datastore/api/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Cloud Datastore API Samples - - - diff --git a/datastore/api/README.rst b/datastore/api/README.rst new file mode 100644 index 000000000000..0b81b06297da --- /dev/null +++ b/datastore/api/README.rst @@ -0,0 +1,136 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Datastore Python Samples +=============================================================================== + +This directory contains samples for Google Cloud Datastore. `Google Cloud Datastore`_ is a NoSQL document database built for automatic scaling, high performance, and ease of application development. + + + + +.. _Google Cloud Datastore: https://cloud.google.com/datastore/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + +Tasks example app ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python tasks.py + + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] project_id + + Demonstrates datastore API operations. + + positional arguments: + project_id Your cloud project ID. 
+ + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/datastore/api/README.rst.in b/datastore/api/README.rst.in new file mode 100644 index 000000000000..29bf4c53ec25 --- /dev/null +++ b/datastore/api/README.rst.in @@ -0,0 +1,24 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Datastore + short_name: Cloud Datastore + url: https://cloud.google.com/datastore/docs + description: > + `Google Cloud Datastore`_ is a NoSQL document database built for automatic + scaling, high performance, and ease of application development. + +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Tasks example app + file: tasks.py +- name: Snippets + file: snippets.py + show_help: true + +cloud_client_library: true diff --git a/dns/README.md b/dns/README.md deleted file mode 100644 index 526f21ee3e44..000000000000 --- a/dns/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# Google Cloud DNS Samples - -This section contains samples for [Google Cloud DNS](https://cloud.google.com/dns). - -## Running the samples - -1. Your environment must be setup with [authentication -information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running: - - $ gcloud init - -2. Install dependencies from `requirements.txt`: - - $ pip install -r requirements.txt - -3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page. - -## Additional resources - -For more information on Cloud Storage you can visit: - -> https://cloud.google.com/dns - -For information on the Python Cloud Client Library visit: - -> https://googlecloudplatform.github.io/gcloud-python diff --git a/dns/api/README.md b/dns/api/README.md deleted file mode 100644 index 12bf8b8c81be..000000000000 --- a/dns/api/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Cloud DNS API Samples - - -These samples are used on the following documentation pages: - -> -* https://cloud.google.com/dns/records/ -* https://cloud.google.com/dns/zones/ -* https://cloud.google.com/dns/monitoring - - diff --git a/dns/api/README.rst b/dns/api/README.rst new file mode 100644 index 000000000000..6f287100a193 --- /dev/null +++ b/dns/api/README.rst @@ -0,0 +1,102 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud DNS Python Samples +=============================================================================== + +This directory contains samples for Google Cloud DNS. 
`Google Cloud DNS`_ allows you publish your domain names using Google's infrastructure for production-quality, high-volume DNS services. Google's global network of anycast name servers provide reliable, low-latency authoritative name lookups for your domains from anywhere in the world. + + + + +.. _Google Cloud DNS: https://cloud.google.com/dns/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python main.py + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/dns/api/README.rst.in b/dns/api/README.rst.in new file mode 100644 index 000000000000..f321135c76bb --- /dev/null +++ b/dns/api/README.rst.in @@ -0,0 +1,22 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud DNS + short_name: Cloud DNS + url: https://cloud.google.com/dns/docs + description: > + `Google Cloud DNS`_ allows you publish your domain names using Google's + infrastructure for production-quality, high-volume DNS services. + Google's global network of anycast name servers provide reliable, + low-latency authoritative name lookups for your domains from anywhere + in the world. + +setup: +- auth +- install_deps + +samples: +- name: Snippets + file: main.py + +cloud_client_library: true diff --git a/logging/api-client/README.rst b/logging/api-client/README.rst new file mode 100644 index 000000000000..d230807b2795 --- /dev/null +++ b/logging/api-client/README.rst @@ -0,0 +1,87 @@ +.. This file is automatically generated. Do not edit this file directly. + +Stackdriver Logging Python Samples +=============================================================================== + +This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. + + + + +.. _Stackdriver Logging: https://cloud.google.com/logging/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. 
_virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +List logs ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python list_logs.py + + + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/logging/api-client/README.rst.in b/logging/api-client/README.rst.in new file mode 100644 index 000000000000..5585c9193178 --- /dev/null +++ b/logging/api-client/README.rst.in @@ -0,0 +1,18 @@ +# This file is used to generate README.rst + +product: + name: Stackdriver Logging + short_name: Stackdriver Logging + url: https://cloud.google.com/logging/docs + description: > + `Stackdriver Logging`_ allows you to store, search, analyze, monitor, + and alert on log data and events from Google Cloud Platform and Amazon + Web Services. + +setup: +- auth +- install_deps + +samples: +- name: List logs + file: list_logs.py diff --git a/logging/cloud-client/README.md b/logging/cloud-client/README.md deleted file mode 100644 index c9042db88ff3..000000000000 --- a/logging/cloud-client/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# Stackdriver Logging v2 API Samples - -`snippets.py` is a simple command-line program to demonstrate writing to a log, -listing its entries, and deleting it. - -`export.py` demonstrates how to interact with sinks which are used to export -logs to Google Cloud Storage, Cloud Pub/Sub, or BigQuery. The sample uses -Google Cloud Storage, but can be easily adapted for other outputs. - - - - -## Prerequisites - -All samples require a [Google Cloud Project](https://console.cloud.google.com). - -To run `export.py`, you will also need a Google Cloud Storage Bucket. - - gsutil mb gs://[YOUR_PROJECT_ID] - -You must add Cloud Logging as an owner to the bucket. To do so, add -`cloud-logs@google.com` as an owner to the bucket. See the -[exportings logs](https://cloud.google.com/logging/docs/export/configure_export#configuring_log_sinks) -docs for complete details. - -# Running locally - -Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication: - - gcloud beta auth application-default login - -Run the samples: - - python snippets.py -h - python export.py -h - diff --git a/logging/cloud-client/README.rst b/logging/cloud-client/README.rst new file mode 100644 index 000000000000..2647c799cff9 --- /dev/null +++ b/logging/cloud-client/README.rst @@ -0,0 +1,163 @@ +.. This file is automatically generated. Do not edit this file directly. + +Stackdriver Logging Python Samples +=============================================================================== + +This directory contains samples for Stackdriver Logging. `Stackdriver Logging`_ allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud Platform and Amazon Web Services. + + + + +.. _Stackdriver Logging: https://cloud.google.com/logging/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. 
When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + +Snippets ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python snippets.py + + usage: snippets.py [-h] logger_name {list,write,delete} ... + + This application demonstrates how to perform basic operations on logs and + log entries with Stackdriver Logging. + + For more information, see the README.md under /logging and the + documentation at https://cloud.google.com/logging/docs. + + positional arguments: + logger_name Logger name + {list,write,delete} + list Lists the most recent entries for a given logger. + write Writes log entries to the given logger. + delete Deletes a logger and all its entries. Note that a + deletion can take several minutes to take effect. + + optional arguments: + -h, --help show this help message and exit + + +Export ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python export.py + + usage: export.py [-h] {list,create,update,delete} ... + + positional arguments: + {list,create,update,delete} + list Lists all sinks. + create Lists all sinks. + update Changes a sink's filter. The filter determines which + logs this sink matches and will be exported to the + destination. For example a filter of 'severity>=INFO' + will send all logs that have a severity of INFO or + greater to the destination. See https://cloud.google.c + om/logging/docs/view/advanced_filters for more filter + information. + delete Deletes a sink. + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. 
Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/logging/cloud-client/README.rst.in b/logging/cloud-client/README.rst.in new file mode 100644 index 000000000000..50862fa1da9b --- /dev/null +++ b/logging/cloud-client/README.rst.in @@ -0,0 +1,26 @@ +# This file is used to generate README.rst + +product: + name: Stackdriver Logging + short_name: Stackdriver Logging + url: https://cloud.google.com/logging/docs + description: > + `Stackdriver Logging`_ allows you to store, search, analyze, monitor, + and alert on log data and events from Google Cloud Platform and Amazon + Web Services. + +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Snippets + file: snippets.py + show_help: true +- name: Export + file: export.py + show_help: true + +cloud_client_library: true diff --git a/monitoring/README.md b/monitoring/README.md deleted file mode 100644 index 0e15897fd73d..000000000000 --- a/monitoring/README.md +++ /dev/null @@ -1,33 +0,0 @@ -# Google Cloud Monitoring Samples - -This section contains samples for [Google Cloud Monitoring](https://cloud.google.com/monitoring). - -## Running the samples - -1. Your environment must be setup with [authentication -information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). *Note* that Cloud Monitoring does not currently work -with `gcloud auth`. You will need to use a *service account* when running -locally and set the `GOOGLE_APPLICATION_CREDENTIALS` environment variable. - - $ export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json - -2. Install dependencies from `requirements.txt`: - - $ pip install -r requirements.txt - -3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page. - -## Additional resources - -For more information on Cloud Monitoring you can visit: - -> https://cloud.google.com/monitoring - -For more information on the Stackdriver Monitoring API Python library surface you -can visit: - -> https://developers.google.com/resources/api-libraries/documentation/storage/v2beta2/python/latest/ - -For information on the Python Client Library visit: - -> https://developers.google.com/api-client-library/python diff --git a/monitoring/api/v2/README.md b/monitoring/api/v2/README.md deleted file mode 100644 index 876edbe7fc3d..000000000000 --- a/monitoring/api/v2/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Stackdriver Monitoring API Samples - - -These samples are used on the following documentation page: - -> https://cloud.google.com/monitoring/demos/ - - -m/monitoring/api/authentication - - diff --git a/monitoring/api/v2/README.rst b/monitoring/api/v2/README.rst new file mode 100644 index 000000000000..c072a30ae49f --- /dev/null +++ b/monitoring/api/v2/README.rst @@ -0,0 +1,186 @@ +.. This file is automatically generated. Do not edit this file directly. + +Stackdriver Monitoring Python Samples +=============================================================================== + +This directory contains samples for Stackdriver Monitoring. 
`Stackdriver Monitoring `_ collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver ingests that data and generates insights via dashboards, charts, and alerts. + + + + +.. _Stackdriver Monitoring: https://cloud.google.com/monitoring/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Authenticating & basic operations ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python auth.py + + usage: auth.py [-h] project_id + + Sample command-line program for retrieving Stackdriver Monitoring API data. + + Prerequisites: To run locally, download a Service Account JSON file from + your project and point GOOGLE_APPLICATION_CREDENTIALS to the file. + + This sample is used on this page: + + https://cloud.google.com/monitoring/api/authentication + + For more information, see the README.md under /monitoring. + + positional arguments: + project_id Your Google Cloud project ID. + + optional arguments: + -h, --help show this help message and exit + + +Labeled custom metrics ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python labeled_custom_metric.py + + usage: labeled_custom_metric.py [-h] --project_id PROJECT_ID --color COLOR + --size SIZE --count COUNT + + Creates, writes, and reads a labeled custom metric. 
+ + This is an example of how to use the Stackdriver Monitoring API to create, + write, and read a labeled custom metric. + The metric has two labels: color and size, and the data points represent + the number of shirts of the given color and size in inventory. + + Prerequisites: To run locally, download a Service Account JSON file from + your project and point GOOGLE_APPLICATION_CREDENTIALS to the file. + + From App Engine or a GCE instance with the correct scope, the Service + Account step is not required. + + Typical usage: Run the following shell commands on the instance: + python labeled_custom_metric.py --project_id / + --color yellow --size large --count 10 + + optional arguments: + -h, --help show this help message and exit + --project_id PROJECT_ID + Project ID you want to access. + --color COLOR + --size SIZE + --count COUNT + + +Lightweight custom metrics ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python labeled_custom_metric.py + + usage: labeled_custom_metric.py [-h] --project_id PROJECT_ID --color COLOR + --size SIZE --count COUNT + + Creates, writes, and reads a labeled custom metric. + + This is an example of how to use the Stackdriver Monitoring API to create, + write, and read a labeled custom metric. + The metric has two labels: color and size, and the data points represent + the number of shirts of the given color and size in inventory. + + Prerequisites: To run locally, download a Service Account JSON file from + your project and point GOOGLE_APPLICATION_CREDENTIALS to the file. + + From App Engine or a GCE instance with the correct scope, the Service + Account step is not required. + + Typical usage: Run the following shell commands on the instance: + python labeled_custom_metric.py --project_id / + --color yellow --size large --count 10 + + optional arguments: + -h, --help show this help message and exit + --project_id PROJECT_ID + Project ID you want to access. + --color COLOR + --size SIZE + --count COUNT + + + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/monitoring/api/v2/README.rst.in b/monitoring/api/v2/README.rst.in new file mode 100644 index 000000000000..42c44b5798ae --- /dev/null +++ b/monitoring/api/v2/README.rst.in @@ -0,0 +1,28 @@ +# This file is used to generate README.rst + +product: + name: Stackdriver Monitoring + short_name: Stackdriver Monitoring + url: https://cloud.google.com/monitoring/docs + description: > + `Stackdriver Monitoring `_ collects metrics, events, and metadata from + Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, + application instrumentation, and a variety of common application components + including Cassandra, Nginx, Apache Web Server, Elasticsearch and many + others. Stackdriver ingests that data and generates insights via + dashboards, charts, and alerts. + +setup: +- auth +- install_deps + +samples: +- name: Authenticating & basic operations + file: auth.py + show_help: true +- name: Labeled custom metrics + file: labeled_custom_metric.py + show_help: true +- name: Lightweight custom metrics + file: labeled_custom_metric.py + show_help: true diff --git a/monitoring/api/v3/README.md b/monitoring/api/v3/README.md deleted file mode 100644 index b163cc91b5f6..000000000000 --- a/monitoring/api/v3/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# Cloud Monitoring v3 Sample - -Sample command-line programs for retrieving Stackdriver Monitoring API V3 data. 
- -`list_resources.py` is a simple command-line program to demonstrate connecting to the Google -Monitoring API to retrieve API data and print out some of the resources. - -`custom_metric.py` demonstrates how to create a custom metric and write a TimeSeries -value to it. - -## Prerequisites to run locally: - -* [pip](https://pypi.python.org/pypi/pip) - -Go to the [Google Cloud Console](https://console.cloud.google.com). - - -# Set Up Your Local Dev Environment -To install, run the following commands. If you want to use [virtualenv](https://virtualenv.readthedocs.org/en/latest/) -(recommended), run the commands within a virtualenv. - - * pip install -r requirements.txt - -Create local credentials by running the following command and following the oauth2 flow: - - gcloud beta auth application-default login - -To run: - - python list_resources.py --project_id= - python custom_metric.py --project_id= Credentials -* Click 'New Credentials', and create a Service Account or [click here](https://console.cloud.google -.com/project/_/apiui/credential/serviceaccount) - Download the JSON for this service account, and set the `GOOGLE_APPLICATION_CREDENTIALS` - environment variable to point to the file containing the JSON credentials. - - - export GOOGLE_APPLICATION_CREDENTIALS=~/Downloads/-0123456789abcdef.json - - -## Contributing changes - -* See [CONTRIBUTING.md](CONTRIBUTING.md) - -## Licensing - -* See [LICENSE](LICENSE) - - diff --git a/monitoring/api/v3/README.rst b/monitoring/api/v3/README.rst new file mode 100644 index 000000000000..3a510d580d81 --- /dev/null +++ b/monitoring/api/v3/README.rst @@ -0,0 +1,136 @@ +.. This file is automatically generated. Do not edit this file directly. + +Stackdriver Monitoring Python Samples +=============================================================================== + +This directory contains samples for Stackdriver Monitoring. `Stackdriver Monitoring `_ collects metrics, events, and metadata from Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, application instrumentation, and a variety of common application components including Cassandra, Nginx, Apache Web Server, Elasticsearch and many others. Stackdriver ingests that data and generates insights via dashboards, charts, and alerts. + + + + +.. _Stackdriver Monitoring: https://cloud.google.com/monitoring/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. 
_additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +List resources ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python list_resources.py + + usage: list_resources.py [-h] --project_id PROJECT_ID + + Sample command-line program for retrieving Stackdriver Monitoring API V3 + data. + + See README.md for instructions on setting up your development environment. + + To run locally: + + python list_resources.py --project_id= + + optional arguments: + -h, --help show this help message and exit + --project_id PROJECT_ID + Project ID you want to access. + + +Custom metrics ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python custom_metric.py + + usage: custom_metric.py [-h] --project_id PROJECT_ID + + Sample command-line program for writing and reading Stackdriver Monitoring + API V3 custom metrics. + + Simple command-line program to demonstrate connecting to the Google + Monitoring API to write custom metrics and read them back. + + See README.md for instructions on setting up your development environment. + + This example creates a custom metric based on a hypothetical GAUGE measurement. + + To run locally: + + python custom_metric.py --project_id= + + optional arguments: + -h, --help show this help message and exit + --project_id PROJECT_ID + Project ID you want to access. + + + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/monitoring/api/v3/README.rst.in b/monitoring/api/v3/README.rst.in new file mode 100644 index 000000000000..1b5d94ddf284 --- /dev/null +++ b/monitoring/api/v3/README.rst.in @@ -0,0 +1,25 @@ +# This file is used to generate README.rst + +product: + name: Stackdriver Monitoring + short_name: Stackdriver Monitoring + url: https://cloud.google.com/monitoring/docs + description: > + `Stackdriver Monitoring `_ collects metrics, events, and metadata from + Google Cloud Platform, Amazon Web Services (AWS), hosted uptime probes, + application instrumentation, and a variety of common application components + including Cassandra, Nginx, Apache Web Server, Elasticsearch and many + others. Stackdriver ingests that data and generates insights via + dashboards, charts, and alerts. 
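As a rough illustration of what the ``custom_metric.py`` sample described above does with the Monitoring v3 API, the sketch below writes one GAUGE data point to a custom metric with ``google-api-python-client``. This is not the sample's actual code: the metric type, value, and project ID are invented for illustration, and it assumes Application Default Credentials are configured as described in the Setup section (older client versions may require passing credentials to ``discovery.build`` explicitly).

.. code-block:: python

    # Illustrative sketch only: writes a single GAUGE point to a custom metric
    # via the Monitoring v3 REST API. Metric type and project ID are placeholders.
    import datetime

    from googleapiclient import discovery

    project_id = 'your-project-id'
    client = discovery.build('monitoring', 'v3')

    now = datetime.datetime.utcnow().isoformat('T') + 'Z'  # RFC 3339 timestamp
    body = {
        'timeSeries': [{
            'metric': {
                # Hypothetical custom metric type; writing to it auto-creates
                # the metric descriptor for custom metrics.
                'type': 'custom.googleapis.com/my_gauge',
            },
            'resource': {
                'type': 'global',
                'labels': {'project_id': project_id},
            },
            'points': [{
                'interval': {'endTime': now},
                'value': {'doubleValue': 42.0},
            }],
        }],
    }
    client.projects().timeSeries().create(
        name='projects/{}'.format(project_id), body=body).execute()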
+ +setup: +- auth +- install_deps + +samples: +- name: List resources + file: list_resources.py + show_help: true +- name: Custom metrics + file: custom_metric.py + show_help: true diff --git a/pubsub/cloud-client/README.md b/pubsub/cloud-client/README.md deleted file mode 100644 index 00c7d2f7ac95..000000000000 --- a/pubsub/cloud-client/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# Google Cloud Pub/Sub Samples - - - - -## Prerequisites - -All samples require a [Google Cloud Project](https://console.cloud.google.com). - -Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication: - - gcloud beta auth application-default login - -Run the samples: - - python publisher.py -h - python subscriber.py -h diff --git a/pubsub/cloud-client/README.rst b/pubsub/cloud-client/README.rst new file mode 100644 index 000000000000..8ea32bab0be7 --- /dev/null +++ b/pubsub/cloud-client/README.rst @@ -0,0 +1,203 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Pub/Sub Python Samples +=============================================================================== + +This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. + + + + +.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. 
code-block:: bash + + $ python quickstart.py + + +Publisher ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python publisher.py + + usage: publisher.py [-h] {list,create,delete,publish} ... + + This application demonstrates how to perform basic operations on topics + with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + {list,create,delete,publish} + list Lists all Pub/Sub topics in the current project. + create Create a new Pub/Sub topic. + delete Deletes an existing Pub/Sub topic. + publish Publishes a message to a Pub/Sub topic with the given + data. + + optional arguments: + -h, --help show this help message and exit + + +Subscribers ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python subscriber.py + + usage: subscriber.py [-h] {list,create,delete,receive} ... + + This application demonstrates how to perform basic operations on + subscriptions with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + {list,create,delete,receive} + list Lists all subscriptions for a given topic. + create Create a new pull subscription on the given topic. + delete Deletes an existing Pub/Sub topic. + receive Receives a message from a pull subscription. + + optional arguments: + -h, --help show this help message and exit + + +Identity and Access Management ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python iam.py + + usage: iam.py [-h] + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + ... + + This application demonstrates how to perform basic operations on IAM + policies with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + get-topic-policy Prints the IAM policy for the given topic. + get-subscription-policy + Prints the IAM policy for the given subscription. + set-topic-policy Sets the IAM policy for a topic. + set-subscription-policy + Sets the IAM policy for a topic. + check-topic-permissions + Checks to which permissions are available on the given + topic. + check-subscription-permissions + Checks to which permissions are available on the given + subscription. + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/pubsub/cloud-client/README.rst.in b/pubsub/cloud-client/README.rst.in new file mode 100644 index 000000000000..6a9fd00c722b --- /dev/null +++ b/pubsub/cloud-client/README.rst.in @@ -0,0 +1,28 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Pub/Sub + short_name: Cloud Pub/Sub + url: https://cloud.google.com/pubsub/docs + description: > + `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that + allows you to send and receive messages between independent applications. + +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Publisher + file: publisher.py + show_help: true +- name: Subscribers + file: subscriber.py + show_help: true +- name: Identity and Access Management + file: iam.py + show_help: true + +cloud_client_library: true diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst index 1d960a72404e..9acd1ba227aa 100644 --- a/scripts/readme-gen/templates/README.tmpl.rst +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -7,6 +7,8 @@ This directory contains samples for {{product.name}}. {{product.description}} +{{description}} + .. _{{product.name}}: {{product.url}} {% if setup %} @@ -49,13 +51,16 @@ To run this sample: The client library ------------------------------------------------------------------------------- -This sample uses the `Google Cloud Client Library for Python `_. +This sample uses the `Google Cloud Client Library for Python`_. You can read the documentation for more details on API usage and use GitHub -to `browse the source `_ and `report issues `_. - -.. ccl-docs: https://googlecloudplatform.github.io/google-cloud-python/ -.. ccl-source: https://github.com/GoogleCloudPlatform/google-cloud-python -.. ccl-issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues {% endif %} diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst index 3a6f1abaa0de..b4ce5b66bfa2 100644 --- a/scripts/readme-gen/templates/auth.tmpl.rst +++ b/scripts/readme-gen/templates/auth.tmpl.rst @@ -17,7 +17,7 @@ authentication: #. When running on App Engine or Compute Engine, credentials are already set-up. However, you may need to configure your Compute Engine instance - with `additional scopes `_. + with `additional scopes`_. #. You can create a `Service Account key file`_. This file can be used to authenticate to Google Cloud Platform services from any environment. To use @@ -29,5 +29,5 @@ authentication: export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json .. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. _gce-auth: https://cloud.google.com/compute/docs/authentication#using +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using .. 
_Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount diff --git a/storage/README.md b/storage/README.md deleted file mode 100644 index 7476a656d5ad..000000000000 --- a/storage/README.md +++ /dev/null @@ -1,31 +0,0 @@ -# Google Cloud Storage Samples - -This section contains samples for [Google Cloud Storage](https://cloud.google.com/storage). - -## Running the samples - -1. Your environment must be setup with [authentication -information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running: - - $ gcloud init - -2. Install dependencies from `requirements.txt`: - - $ pip install -r requirements.txt - -3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page. - -## Additional resources - -For more information on Cloud Storage you can visit: - -> https://cloud.google.com/storage - -For more information on the Cloud Storage API Python library surface you -can visit: - -> https://developers.google.com/resources/api-libraries/documentation/storage/v1/python/latest/ - -For information on the Python Client Library visit: - -> https://developers.google.com/api-client-library/python diff --git a/storage/api/README.rst b/storage/api/README.rst index a045e1d96f92..d99acec37a36 100644 --- a/storage/api/README.rst +++ b/storage/api/README.rst @@ -6,6 +6,8 @@ Google Cloud Storage Python Samples This directory contains samples for Google Cloud Storage. `Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. + + .. _Google Cloud Storage: https://cloud.google.com/storage/docs Setup @@ -16,7 +18,7 @@ Authentication ++++++++++++++ Authentication is typically done through `Application Default Credentials`_, -this means you do not have to change the code to authenticate as long as +which means you do not have to change the code to authenticate as long as your environment has credentials. You have a few options for setting up authentication: @@ -29,7 +31,7 @@ authentication: #. When running on App Engine or Compute Engine, credentials are already set-up. However, you may need to configure your Compute Engine instance - with `additional scopes `_. + with `additional scopes`_. #. You can create a `Service Account key file`_. This file can be used to authenticate to Google Cloud Platform services from any environment. To use @@ -41,7 +43,7 @@ authentication: export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json .. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. _gce-auth: https://cloud.google.com/compute/docs/authentication#using +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using .. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount Install Dependencies diff --git a/storage/cloud-client/README.rst b/storage/cloud-client/README.rst index ea49a4af3efa..89f5365446bd 100644 --- a/storage/cloud-client/README.rst +++ b/storage/cloud-client/README.rst @@ -6,6 +6,8 @@ Google Cloud Storage Python Samples This directory contains samples for Google Cloud Storage. 
`Google Cloud Storage`_ allows world-wide storage and retrieval of any amount of data at any time. + + .. _Google Cloud Storage: https://cloud.google.com/storage/docs Setup @@ -16,7 +18,7 @@ Authentication ++++++++++++++ Authentication is typically done through `Application Default Credentials`_, -this means you do not have to change the code to authenticate as long as +which means you do not have to change the code to authenticate as long as your environment has credentials. You have a few options for setting up authentication: @@ -29,7 +31,7 @@ authentication: #. When running on App Engine or Compute Engine, credentials are already set-up. However, you may need to configure your Compute Engine instance - with `additional scopes `_. + with `additional scopes`_. #. You can create a `Service Account key file`_. This file can be used to authenticate to Google Cloud Platform services from any environment. To use @@ -41,7 +43,7 @@ authentication: export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json .. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. _gce-auth: https://cloud.google.com/compute/docs/authentication#using +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using .. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount Install Dependencies @@ -68,6 +70,18 @@ Install Dependencies Samples ------------------------------------------------------------------------------- +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + Snippets +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -223,13 +237,16 @@ To run this sample: The client library ------------------------------------------------------------------------------- -This sample uses the `Google Cloud Client Library for Python `_. +This sample uses the `Google Cloud Client Library for Python`_. You can read the documentation for more details on API usage and use GitHub -to `browse the source `_ and `report issues `_. - -.. ccl-docs: https://googlecloudplatform.github.io/google-cloud-python/ -.. ccl-source: https://github.com/GoogleCloudPlatform/google-cloud-python -.. ccl-issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues .. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/storage/cloud-client/README.rst.in b/storage/cloud-client/README.rst.in index 83cf9a5e328c..2a6e37ff7745 100644 --- a/storage/cloud-client/README.rst.in +++ b/storage/cloud-client/README.rst.in @@ -13,6 +13,8 @@ setup: - install_deps samples: +- name: Quickstart + file: quickstart.py - name: Snippets file: snippets.py show_help: true
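The final hunk above registers a new Quickstart sample for Cloud Storage. As a hedged illustration of what such a quickstart typically looks like with the ``google-cloud-storage`` client (this is not necessarily the repository's ``quickstart.py``; the bucket and object names are placeholders):

.. code-block:: python

    # Minimal sketch of a Cloud Storage quickstart using the Python client.
    # The bucket name is a placeholder and must be globally unique.
    from google.cloud import storage

    client = storage.Client()  # uses Application Default Credentials

    # Create a bucket, then upload and read back a small object.
    bucket = client.create_bucket('your-unique-bucket-name')
    blob = bucket.blob('hello.txt')
    blob.upload_from_string('Hello, Cloud Storage!')
    print(blob.download_as_string())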