From d33ea01a5f45eb9338d03e43fd42bdd4e9f5c695 Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Fri, 18 Sep 2015 11:11:57 -0700
Subject: [PATCH] More consistency and standardization improvements.

* Adding argparse/docstring to cloud storage and monitoring samples.
* Updating readmes for BigQuery, Monitoring, Cloud Storage, and the Blog.
* Removing all non-appengine requirements.txt files and consolidating into a
  single top-level requirements.txt. This makes it easier for us to verify
  that the samples work with the latest version of all dependent libraries.
* Moving test dependencies out of tox.ini and into requirements-dev.txt, as
  per standard practice.
---
 bigquery/README.md                  | 12 ++-
 .../README.md                       |  6 +-
 .../requirements.txt                | 12 ---
 cloud_logging/README.md             | 14 ++--
 monitoring/README.md                | 33 ++++++++
 monitoring/api/auth.py              | 75 +++++++------------
 monitoring/api/auth_test.py         |  2 +
 requirements-dev.txt                | 17 +++++
 requirements.txt                    | 24 +++---
 storage/README.md                   | 40 +++++-----
 storage/api/compose_objects.py      | 67 ++++++++---------
 storage/api/compose_objects_test.py | 10 +--
 storage/api/list_objects.py         | 45 +++++------
 storage/api/list_objects_test.py    |  6 +-
 storage/requirements.txt            |  1 -
 tox.ini                             | 13 +---
 16 files changed, 193 insertions(+), 184 deletions(-)
 delete mode 100644 blog/introduction_to_data_models_in_cloud_datastore/requirements.txt
 create mode 100644 monitoring/README.md
 create mode 100644 requirements-dev.txt
 delete mode 100644 storage/requirements.txt

diff --git a/bigquery/README.md b/bigquery/README.md
index ec0d694b2e96..0cc93f51e8f4 100644
--- a/bigquery/README.md
+++ b/bigquery/README.md
@@ -4,10 +4,16 @@ This section contains samples for [Google BigQuery](https://cloud.google.com/big
 
 ## Running the samples
 
-In order to run it, your environment must be setup with [authentication
-information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running it in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running:
+1. Your environment must be set up with [authentication
+information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running:
 
-    $ gcloud auth login
+        $ gcloud auth login
+
+2. Install dependencies from the top-level [`requirements.txt`](../requirements.txt):
+
+        $ pip install -r requirements.txt
+
+3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page.
 
 ## Additional resources
 
diff --git a/blog/introduction_to_data_models_in_cloud_datastore/README.md b/blog/introduction_to_data_models_in_cloud_datastore/README.md
index a94f51f39494..869bb57ede45 100644
--- a/blog/introduction_to_data_models_in_cloud_datastore/README.md
+++ b/blog/introduction_to_data_models_in_cloud_datastore/README.md
@@ -1,18 +1,20 @@
 # Introduction to data models in Cloud Datastore
 
-This sample code is used in [this blog post](). It demonstrates two data models
+This sample code is used in [this blog post](http://googlecloudplatform.blogspot.com/2015/08/Introduction-to-data-models-in-Cloud-Datastore.html). It demonstrates two data models
 using [Google Cloud Datastore](https://cloud.google.com/datastore).
 
 ## Prerequisites
 
 1. Create project with billing enabled on the [Google Developers Console](https://console.developers.google.com)
+
 2. [Enable the Datastore API](https://console.developers.google.com/project/_/apiui/apiview/datastore/overview).
+
 3. Install the [Google Cloud SDK](https://cloud.google.com/sdk) and be sure to run ``gcloud auth``.
 
 ## Running the samples
 
-Install any dependencies:
+Install dependencies from the top-level [`requirements.txt`](../../requirements.txt):
 
     pip install -r requirements.txt
 
diff --git a/blog/introduction_to_data_models_in_cloud_datastore/requirements.txt b/blog/introduction_to_data_models_in_cloud_datastore/requirements.txt
deleted file mode 100644
index 12e4cd8ca34d..000000000000
--- a/blog/introduction_to_data_models_in_cloud_datastore/requirements.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-gcloud==0.7.0
-google-apitools==0.4.8
-httplib2==0.9.1
-oauth2client==1.4.12
-protobuf==3.0.0a1
-protorpc==0.10.0
-pyasn1==0.1.8
-pyasn1-modules==0.0.6
-pycrypto==2.6.1
-pytz==2015.4
-rsa==3.1.4
-six==1.9.0
diff --git a/cloud_logging/README.md b/cloud_logging/README.md
index 31f06b852dec..aaf34d84c0ae 100644
--- a/cloud_logging/README.md
+++ b/cloud_logging/README.md
@@ -4,10 +4,16 @@ This section contains samples for [Google Cloud Logging](https://cloud.google.co
 
 ## Running the samples
 
-In order to run it, your environment must be setup with [authentication
-information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running it in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running:
+1. Your environment must be set up with [authentication
+information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running:
 
-    $ gcloud auth login
+        $ gcloud auth login
+
+2. Install dependencies from the top-level [`requirements.txt`](../requirements.txt):
+
+        $ pip install -r requirements.txt
+
+3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page.
 
 ## Additional resources
 
@@ -23,5 +29,3 @@ can visit:
 For information on the Python Client Library visit:
 
 > https://developers.google.com/api-client-library/python
-
-## Other Samples
diff --git a/monitoring/README.md b/monitoring/README.md
new file mode 100644
index 000000000000..98c2b42d9e31
--- /dev/null
+++ b/monitoring/README.md
@@ -0,0 +1,33 @@
+# Google Cloud Monitoring Samples
+
+This section contains samples for [Google Cloud Monitoring](https://cloud.google.com/monitoring).
+
+## Running the samples
+
+1. Your environment must be set up with [authentication
+information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). *Note* that Cloud Monitoring does not currently work
+with `gcloud auth`. You will need to use a *service account* when running
+locally and set the `GOOGLE_APPLICATION_CREDENTIALS` environment variable.
+
+        $ export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json
+
+2. Install dependencies from the top-level [`requirements.txt`](../requirements.txt):
+
+        $ pip install -r requirements.txt
+
+3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page.
+
+## Additional resources
+
+For more information on Cloud Monitoring you can visit:
+
+> https://cloud.google.com/monitoring
+
+For more information on the Cloud Monitoring API Python library surface you
+can visit:
+
+> https://developers.google.com/resources/api-libraries/documentation/cloudmonitoring/v2beta2/python/latest/
+
+For information on the Python Client Library visit:
+
+> https://developers.google.com/api-client-library/python
diff --git a/monitoring/api/auth.py b/monitoring/api/auth.py
index 2207694af086..d62f03c03be9 100644
--- a/monitoring/api/auth.py
+++ b/monitoring/api/auth.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 # Copyright 2015 Google Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,38 +16,18 @@
 """Sample command-line program for retrieving Google Cloud Monitoring API data.
 
-Simple command-line program to demonstrate connecting to the Google Cloud
-Monitoring API to retrieve API data, using application default credentials to
-authenticate.
-
-This sample obtains authentication information from its environment via
-application default credentials [1].
-
-If you're not running the sample on Google App Engine or Compute Engine (where
-the environment comes pre-authenticated as a service account), you'll have to
-initialize your environment with credentials the sample can use.
-
-One way to do this is through the cloud console's credentials page [2]. Create
-a new client ID of type 'Service account', and download its JSON key. This will
-be the account the sample authenticates as.
-
-Once you've downloaded the service account's JSON key, you provide it to the
-sample by setting the GOOGLE_APPLICATION_CREDENTIALS environment variable to
-point to the key file:
+This sample is used on this page:
 
-$ export GOOGLE_APPLICATION_CREDENTIALS=/path/to/json-key.json
+    https://cloud.google.com/monitoring/api/authentication
 
-[1] https://developers.google.com/identity/protocols/\
-    application-default-credentials
-[2] https://console.developers.google.com/project/_/apiui/credential
-""" # NOQA
+For more information, see the README.md under /monitoring.
+"""
 
 # [START all]
+import argparse
 import json
-import sys
 
 from googleapiclient.discovery import build
-
 from oauth2client.client import GoogleCredentials
 
 
@@ -53,42 +35,39 @@
 YOUNGEST = '2015-01-01T00:00:00Z'
 
 
-def ListTimeseries(project_name, service):
+def list_timeseries(monitoring, project_name):
     """Query the Timeseries.list API method.
 
     Args:
+      monitoring: the CloudMonitoring service object.
       project_name: the name of the project you'd like to monitor.
-      service: the CloudMonitoring service object.
""" + timeseries = monitoring.timeseries() - timeseries = service.timeseries() + response = timeseries.list( + project=project_name, metric=METRIC, youngest=YOUNGEST).execute() print('Timeseries.list raw response:') - try: - response = timeseries.list( - project=project_name, metric=METRIC, youngest=YOUNGEST).execute() - - print(json.dumps(response, - sort_keys=True, - indent=4, - separators=(',', ': '))) - except: - print('Error:') - for error in sys.exc_info(): - print(error) + print(json.dumps(response, + sort_keys=True, + indent=4, + separators=(',', ': '))) def main(project_name): - # Create and return the CloudMonitoring service object. - service = build('cloudmonitoring', 'v2beta2', - credentials=GoogleCredentials.get_application_default()) + credentials = GoogleCredentials.get_application_default() + monitoring = build('cloudmonitoring', 'v2beta2', credentials=credentials) - ListTimeseries(project_name, service) + list_timeseries(monitoring, project_name) if __name__ == '__main__': - if len(sys.argv) != 2: - print("Usage: {} ".format(sys.argv[0])) - sys.exit(1) - main(sys.argv[1]) + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('project_id', help='Your Google Cloud project ID.') + + args = parser.parse_args() + + main(args.project_id) # [END all] diff --git a/monitoring/api/auth_test.py b/monitoring/api/auth_test.py index a0430bb47545..6a0aa356613d 100644 --- a/monitoring/api/auth_test.py +++ b/monitoring/api/auth_test.py @@ -23,7 +23,9 @@ class TestTimeseriesList(tests.CloudBaseTest): def test_main(self): with tests.capture_stdout() as stdout: auth.main(self.project_id) + output = stdout.getvalue().strip() + self.assertRegexpMatches( output, re.compile(r'Timeseries.list raw response:\s*' r'{\s*"kind": "[^"]+",' diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 000000000000..fdf4be6527be --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,17 @@ +beautifulsoup4==4.4.0 +coverage==3.7.1 +Flask==0.10.1 +funcsigs==0.4 +itsdangerous==0.24 +Jinja2==2.8 +MarkupSafe==0.23 +mock==1.3.0 +nose==1.3.7 +nose-exclude==0.4.1 +nosegae==0.5.7 +pbr==1.8.0 +PyYAML==3.11 +waitress==0.8.10 +WebOb==1.4.1 +WebTest==2.0.18 +Werkzeug==0.10.4 diff --git a/requirements.txt b/requirements.txt index 396a8981627e..0620888a4763 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,14 @@ -argparse>=1.2.1 -google-api-python-client>=1.4.2 -httplib2>=0.9.1 -oauth2client>=1.5.1 -py==1.4.26 -pyasn1==0.1.7 -pyasn1-modules==0.0.5 -rsa==3.1.4 -simplejson==3.6.5 +gcloud==0.7.1 +google-api-python-client==1.4.2 +google-apitools==0.4.11 +httplib2==0.9.1 +oauth2client==1.5.1 +protobuf==3.0.0a1 +protorpc==0.11.1 +pyasn1==0.1.8 +pyasn1-modules==0.0.7 +pycrypto==2.6.1 +rsa==3.2 +simplejson==3.8.0 six==1.9.0 -tox==1.9.0 uritemplate==0.6 -virtualenv==12.0.7 -wsgiref==0.1.2 diff --git a/storage/README.md b/storage/README.md index b18c98d55c9b..f625f9f39d25 100644 --- a/storage/README.md +++ b/storage/README.md @@ -1,27 +1,31 @@ -## Python Samples for Google Cloud Storage +# Google Cloud Storage Samples -Two samples: +This section contains samples for [Google Cloud Storage](https://cloud.google.com/storage). -1. ``list_objects.py`` lists objects in a bucket. -2. ``compose_objects.py`` composes objects together to create another. +## Running the samples -See the docstring for each sample for usage, or run the sample for the help text. +1. 
Your environment must be set up with [authentication
+information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running:
 
-### Setup
+        $ gcloud auth login
 
-Before running the samples, you'll need the Google Cloud SDK in order to setup authentication.
+2. Install dependencies from the top-level [`requirements.txt`](../requirements.txt):
 
-1. Install the [Google Cloud SDK](https://cloud.google.com/sdk/), including the [gcloud tool](https://cloud.google.com/sdk/gcloud/), and [gcloud app component](https://cloud.google.com/sdk/gcloud-app).
-2. Setup the gcloud tool.
+        $ pip install -r requirements.txt
 
-   ```
-   gcloud components update app
-   gcloud auth login
-   gcloud config set project
-   ```
+3. Depending on the sample, you may also need to create resources on the [Google Developers Console](https://console.developers.google.com). Refer to the sample description and associated documentation page.
 
-You will also need to install the dependencies using [pip](https://pypi.python.org/pypi/pip):
+## Additional resources
 
-```
-pip install -r requirements.txt
-```
+For more information on Cloud Storage you can visit:
+
+> https://cloud.google.com/storage
+
+For more information on the Cloud Storage API Python library surface you
+can visit:
+
+> https://developers.google.com/resources/api-libraries/documentation/storage/v1/python/latest/
+
+For information on the Python Client Library visit:
+
+> https://developers.google.com/api-client-library/python
diff --git a/storage/api/compose_objects.py b/storage/api/compose_objects.py
index b0619f4738a9..d3718d893466 100644
--- a/storage/api/compose_objects.py
+++ b/storage/api/compose_objects.py
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
-#
+#!/usr/bin/env python
+
 # Copyright (C) 2013 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,50 +13,34 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 # [START all]
 """Command-line sample application for composing objects using the Cloud
 Storage API.
 
-Before running, authenticate with the Google Cloud SDK by running:
-    $ gcloud auth login
+This sample is used on this page:
+
+    https://cloud.google.com/storage/docs/json_api/v1/json-api-python-samples
 
-Create a least two sample files:
+For more information, see the README.md under /storage.
+
+To run, create at least two sample files:
     $ echo "File 1" > file1.txt
     $ echo "File 2" > file2.txt
 
 Example invocation:
     $ python compose_objects.py my-bucket destination.txt file1.txt file2.txt
 
-Usage:
-    $ python compose_objects.py \
-        [... ]
-
-You can also get help on all the command-line flags the program understands
-by running:
-    $ python compose-sample.py --help
-
 """
 
 import argparse
 import json
-import sys
 
 from apiclient import discovery
 from oauth2client.client import GoogleCredentials
 
-# Parser for command-line arguments.
-parser = argparse.ArgumentParser(
-    description=__doc__,
-    formatter_class=argparse.RawDescriptionHelpFormatter)
-parser.add_argument('bucket')
-parser.add_argument('destination', help='Destination file name')
-parser.add_argument('sources', nargs='+', help='Source files to compose')
-
-
-def main(argv):
-    # Parse the command-line flags.
-    args = parser.parse_args(argv[1:])
+def main(bucket, destination, sources):
     # Get the application default credentials. When running locally, these are
     # available after running `gcloud auth login`. When running on compute
     # engine, these are available from the environment.
@@ -67,40 +51,53 @@ def main(argv):
     service = discovery.build('storage', 'v1', credentials=credentials)
 
     # Upload the source files.
-    for filename in args.sources:
+    for filename in sources:
         req = service.objects().insert(
             media_body=filename,
             name=filename,
-            bucket=args.bucket)
+            bucket=bucket)
         resp = req.execute()
 
         print('> Uploaded source file {}'.format(filename))
         print(json.dumps(resp, indent=2))
 
     # Construct a request to compose the source files into the destination.
     compose_req_body = {
-        'sourceObjects': [{'name': filename} for filename in args.sources],
+        'sourceObjects': [{'name': filename} for filename in sources],
         'destination': {
            'contentType': 'text/plain',    # required
         }
     }
+
     req = service.objects().compose(
-        destinationBucket=args.bucket,
-        destinationObject=args.destination,
+        destinationBucket=bucket,
+        destinationObject=destination,
         body=compose_req_body)
+
     resp = req.execute()
-    print('> Composed files into {}'.format(args.destination))
+
+    print('> Composed files into {}'.format(destination))
     print(json.dumps(resp, indent=2))
 
     # Download and print the composed object.
     req = service.objects().get_media(
-        bucket=args.bucket,
-        object=args.destination)
+        bucket=bucket,
+        object=destination)
     res = req.execute()
+
     print('> Composed file contents:')
     print(res)
 
 
 if __name__ == '__main__':
-    main(sys.argv)
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument('bucket', help='Your Cloud Storage bucket.')
+    parser.add_argument('destination', help='Destination file name.')
+    parser.add_argument('sources', nargs='+', help='Source files to compose.')
+
+    args = parser.parse_args()
+
+    main(args.bucket, args.destination, args.sources)
 # [END all]
diff --git a/storage/api/compose_objects_test.py b/storage/api/compose_objects_test.py
index 7f37caa87b78..2084496a0ae1 100644
--- a/storage/api/compose_objects_test.py
+++ b/storage/api/compose_objects_test.py
@@ -20,11 +20,9 @@ class TestComposeObjects(CloudBaseTest):
 
     def test_main(self):
-        args = [
-            'ignored_command_name',
+        main(
             self.bucket_name,
             'dest.txt',
-            os.path.join(self.resource_path, 'file1.txt'),
-            os.path.join(self.resource_path, 'file2.txt'),
-        ]
-        main(args)
+            [os.path.join(self.resource_path, 'file1.txt'),
+             os.path.join(self.resource_path, 'file2.txt')]
+        )
diff --git a/storage/api/list_objects.py b/storage/api/list_objects.py
index ebb284202cf3..a30eeab66644 100644
--- a/storage/api/list_objects.py
+++ b/storage/api/list_objects.py
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
-#
+#!/usr/bin/env python
+
 # Copyright (C) 2013 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,40 +14,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # [START all]
-"""Command-line sample application for listing all objects
-in a bucket using the Cloud Storage API.
-
-Before running, authenticate with the Google Cloud SDK by running:
-    $ gcloud auth login
+"""Command-line sample application for listing all objects in a bucket using
-Usage: - $ python list_objects.py +This sample is used on this page: -You can also get help on all the command-line flags the program understands -by running: - $ python list_objects.py --help + https://cloud.google.com/storage/docs/json_api/v1/json-api-python-samples +For more information, see the README.md under /storage. """ import argparse import json -import sys from apiclient import discovery from oauth2client.client import GoogleCredentials -# Parser for command-line arguments. -parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) -parser.add_argument('bucket') - - -def main(argv): - # Parse the command-line flags. - args = parser.parse_args(argv[1:]) - +def main(bucket): # [START list_bucket] # Get the application default credentials. When running locally, these are # available after running `gcloud auth login`. When running on compute @@ -62,7 +47,7 @@ def main(argv): # Make a request to buckets.get to retrieve a list of objects in the # specified bucket. - req = service.buckets().get(bucket=args.bucket) + req = service.buckets().get(bucket=bucket) resp = req.execute() print(json.dumps(resp, indent=2)) # [END list_bucket] @@ -70,7 +55,7 @@ def main(argv): # Create a request to objects.list to retrieve a list of objects. fields_to_return = \ 'nextPageToken,items(name,size,contentType,metadata(my-key))' - req = service.objects().list(bucket=args.bucket, fields=fields_to_return) + req = service.objects().list(bucket=bucket, fields=fields_to_return) # If you have too many items to list in one request, list_next() will # automatically handle paging with the pageToken. @@ -79,6 +64,14 @@ def main(argv): print(json.dumps(resp, indent=2)) req = service.objects().list_next(req, resp) + if __name__ == '__main__': - main(sys.argv) + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('bucket', help='Your Cloud Storage bucket.') + + args = parser.parse_args() + + main(args.bucket) # [END all] diff --git a/storage/api/list_objects_test.py b/storage/api/list_objects_test.py index 1f990699daf1..bd632174fc98 100644 --- a/storage/api/list_objects_test.py +++ b/storage/api/list_objects_test.py @@ -18,8 +18,4 @@ class TestListObjects(CloudBaseTest): def test_main(self): - args = [ - 'ignored_command_name', - self.bucket_name - ] - main(args) + main(self.bucket_name) diff --git a/storage/requirements.txt b/storage/requirements.txt deleted file mode 100644 index 56bd11010dcc..000000000000 --- a/storage/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -google-api-python-client>=1.4.0 diff --git a/tox.ini b/tox.ini index 009ca087e601..d6d0fedee76d 100644 --- a/tox.ini +++ b/tox.ini @@ -6,14 +6,8 @@ envlist = gae, py27, py34, pep8 passenv = PYTHONPATH GOOGLE_* GCLOUD_* TEST_* TRAVIS* basepython = python2.7 deps = - pyyaml - google-api-python-client - flask - mock - nose - coverage - webtest - nose-exclude + -rrequirements.txt + -rrequirements-dev.txt coverargs = --with-coverage --cover-tests @@ -23,7 +17,6 @@ coverargs = [testenv:gae] deps = {[testenv]deps} - nosegae commands = nosetests --with-gae \ --gae-app=tests/resources/app.yaml \ @@ -36,7 +29,6 @@ setenv = [testenv:py27] deps = {[testenv]deps} - gcloud commands = nosetests \ --exclude-dir=appengine \ @@ -47,7 +39,6 @@ commands = basepython = python3.4 deps = {[testenv]deps} - gcloud commands = nosetests \ --exclude-dir=appengine \