diff --git a/.travis.yml b/.travis.yml index 114c1e2f2b21..5e2061042320 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ cache: directories: - $HOME/gcloud/ env: - - PATH=$PATH:$HOME/gcloud/google-cloud-sdk/bin GOOGLE_APPLICATION_CREDENTIALS=$TRAVIS_BUILD_DIR/python-docs-samples.json PYTHONPATH=${HOME}/gcloud/google-cloud-sdk/platform/google_appengine TEST_BUCKET_NAME=bigquery-devrel-samples-bucket TEST_PROJECT_ID=bigquery-devrel-samples #Other environment variables on same line + - PATH=$PATH:$HOME/gcloud/google-cloud-sdk/bin GOOGLE_APPLICATION_CREDENTIALS=$TRAVIS_BUILD_DIR/python-docs-samples.json GAE_PYTHONPATH=${HOME}/gcloud/google-cloud-sdk/platform/google_appengine TEST_BUCKET_NAME=bigquery-devrel-samples-bucket TEST_PROJECT_ID=bigquery-devrel-samples #Other environment variables on same line before_install: #ENCRYPT YOUR PRIVATE KEY (If you need authentication) @@ -35,7 +35,10 @@ before_install: fi install: - - pip install tox + - pip install tox coveralls script: - tox + +after_success: + coveralls diff --git a/appengine/images/tests/test_guestbook.py b/appengine/images/tests/test_guestbook.py index fa5e7b91c7c5..6daa09dec323 100644 --- a/appengine/images/tests/test_guestbook.py +++ b/appengine/images/tests/test_guestbook.py @@ -21,6 +21,12 @@ class TestHandlers(DatastoreTestbedCase): + def setUp(self): + super(TestHandlers, self).setUp() + + # Workaround for other tests clobbering our Greeting model. + reload(main) + def test_get(self): # Build a request object passing the URI path to be tested. # You can also pass headers, query arguments etc. 
diff --git a/bigquery/tests/test_appengine_auth.py b/bigquery/tests/appengine/test_appengine_auth.py similarity index 100% rename from bigquery/tests/test_appengine_auth.py rename to bigquery/tests/appengine/test_appengine_auth.py diff --git a/blog/README.md b/blog/README.md new file mode 100644 index 000000000000..90eb34d19222 --- /dev/null +++ b/blog/README.md @@ -0,0 +1,5 @@ +# Blog Sample Code + +This directory contains samples used in the +[Cloud Platform Blog](http://cloud.google.com/blog). Each sample should have a +readme with instructions and a link to its respective blog post. diff --git a/blog/introduction-to-data-models-in-cloud-datastore/README.md b/blog/introduction-to-data-models-in-cloud-datastore/README.md new file mode 100644 index 000000000000..1620f5dc016e --- /dev/null +++ b/blog/introduction-to-data-models-in-cloud-datastore/README.md @@ -0,0 +1,22 @@ +# Introduction to data models in Cloud Datastore + +This sample code is used in [this blog post](). It demonstrates two data models +using [Google Cloud Datastore](https://cloud.google.com/datastore). + +## Prerequisites + +1. Create a project with billing enabled on the [Google Developers Console](https://console.developers.google.com) +2. [Enable the Datastore API](https://console.developers.google.com/project/_/apiui/apiview/datastore/overview). +3. Install the [Google Cloud SDK](https://cloud.google.com/sdk) and be sure to run ``gcloud auth``. + + +## Running the samples + +Install any dependencies: + + pip install -r requirements.txt + +And run the samples: + + python blog.py your-project-id + python wiki.py your-project-id diff --git a/blog/introduction-to-data-models-in-cloud-datastore/blog.py b/blog/introduction-to-data-models-in-cloud-datastore/blog.py new file mode 100644 index 000000000000..09b28e468806 --- /dev/null +++ b/blog/introduction-to-data-models-in-cloud-datastore/blog.py @@ -0,0 +1,129 @@ +# Copyright 2015, Google, Inc. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import datetime + +from gcloud import datastore + + +def path_to_key(datastore, path): + """ + Translates a file system path to a datastore key. The basename becomes the + key name and the extension becomes the kind. + + Examples: + /file.ext -> key(ext, file) + /parent.ext/file.ext -> key(ext, parent, ext, file) + """ + key_parts = [] + path_parts = path.strip(u'/').split(u'/') + for n, x in enumerate(path_parts): + name, ext = x.rsplit('.', 1) + key_parts.extend([ext, name]) + + return datastore.key(*key_parts) + + +def create_user(ds, username, profile): + key = path_to_key(ds, '{0}.user'.format(username)) + entity = datastore.Entity(key) + entity.update(profile) + ds.put(entity) + + +def create_post(ds, username, post_content): + now = datetime.datetime.utcnow() + key = path_to_key(ds, '{0}.user/{1}.post'.format(username, now)) + entity = datastore.Entity(key) + + entity.update({ + 'created': now, + 'created_by': username, + 'content': post_content + }) + + ds.put(entity) + + +def repost(ds, username, original): + now = datetime.datetime.utcnow() + new_key = path_to_key(ds, '{0}.user/{1}.post'.format(username, now)) + new = datastore.Entity(new_key) + + new.update(original) + + ds.put(new) + + +def list_posts_by_user(ds, username): + user_key = path_to_key(ds, '{0}.user'.format(username)) + return ds.query(kind='post', ancestor=user_key).fetch() + + +def list_all_posts(ds): + return 
ds.query(kind='post').fetch() + + +def main(project_id): + ds = datastore.Client(dataset_id=project_id) + + print("Creating users...") + create_user(ds, 'tonystark', + {'name': 'Tony Stark', 'location': 'Stark Island'}) + create_user(ds, 'peterparker', + {'name': 'Peter Parker', 'location': 'New York City'}) + + print("Creating posts...") + for n in range(1, 10): + create_post(ds, 'tonystark', "Tony's post #{0}".format(n)) + create_post(ds, 'peterparker', "Peter's post #{0}".format(n)) + + print("Re-posting tony's post as peter...") + + tonysposts = list_posts_by_user(ds, 'tonystark') + for post in tonysposts: + original_post = post + break + + repost(ds, 'peterparker', original_post) + + print('Posts by tonystark:') + for post in list_posts_by_user(ds, 'tonystark'): + print("> {0} on {1}".format(post['content'], post['created'])) + + print('Posts by peterparker:') + for post in list_posts_by_user(ds, 'peterparker'): + print("> {0} on {1}".format(post['content'], post['created'])) + + print('Posts by everyone:') + for post in list_all_posts(ds): + print("> {0} on {1}".format(post['content'], post['created'])) + + print('Cleaning up...') + ds.delete_multi([ + path_to_key(ds, 'tonystark.user'), + path_to_key(ds, 'peterparker.user') + ]) + ds.delete_multi([ + x.key for x in list_all_posts(ds)]) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description='Demonstrates wiki data model.') + parser.add_argument('project_id', help='Your cloud project ID.') + + args = parser.parse_args() + + main(args.project_id) diff --git a/blog/introduction-to-data-models-in-cloud-datastore/requirements.txt b/blog/introduction-to-data-models-in-cloud-datastore/requirements.txt new file mode 100644 index 000000000000..12e4cd8ca34d --- /dev/null +++ b/blog/introduction-to-data-models-in-cloud-datastore/requirements.txt @@ -0,0 +1,12 @@ +gcloud==0.7.0 +google-apitools==0.4.8 +httplib2==0.9.1 +oauth2client==1.4.12 +protobuf==3.0.0a1 +protorpc==0.10.0 +pyasn1==0.1.8 
+pyasn1-modules==0.0.6 +pycrypto==2.6.1 +pytz==2015.4 +rsa==3.1.4 +six==1.9.0 diff --git a/blog/introduction-to-data-models-in-cloud-datastore/test_blog.py b/blog/introduction-to-data-models-in-cloud-datastore/test_blog.py new file mode 100644 index 000000000000..f1e866f9150e --- /dev/null +++ b/blog/introduction-to-data-models-in-cloud-datastore/test_blog.py @@ -0,0 +1,22 @@ +# Copyright 2015, Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from blog import main +from tests import CloudBaseTest + + +class BlogTestCase(CloudBaseTest): + """Simple test case that ensures the blog code doesn't throw any errors.""" + + def test_main(self): + main(self.constants['projectId']) diff --git a/blog/introduction-to-data-models-in-cloud-datastore/wiki.py b/blog/introduction-to-data-models-in-cloud-datastore/wiki.py new file mode 100644 index 000000000000..4b281dee7c70 --- /dev/null +++ b/blog/introduction-to-data-models-in-cloud-datastore/wiki.py @@ -0,0 +1,107 @@ +# Copyright 2015, Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import datetime + +from gcloud import datastore + + +def path_to_key(datastore, path): + """ + Translates a file system path to a datastore key. The basename becomes the + key name and the extension becomes the kind. + + Examples: + /file.ext -> key(ext, file) + /parent.ext/file.ext -> key(ext, parent, ext, file) + """ + key_parts = [] + path_parts = path.strip(u'/').split(u'/') + for n, x in enumerate(path_parts): + name, ext = x.rsplit('.', 1) + key_parts.extend([ext, name]) + + return datastore.key(*key_parts) + + +def save_page(ds, page, content): + with ds.transaction(): + now = datetime.datetime.utcnow() + current_key = path_to_key(ds, '{}.page/current.revision'.format(page)) + revision_key = path_to_key(ds, '{}.page/{}.revision'.format(page, now)) + + if ds.get(revision_key): + raise AssertionError("Revision %s already exists" % revision_key) + + current = ds.get(current_key) + + if current: + revision = datastore.Entity(key=revision_key) + revision.update(current) + ds.put(revision) + else: + current = datastore.Entity(key=current_key) + + current['content'] = content + + ds.put(current) + + +def restore_revision(ds, page, revision): + save_page(ds, page, revision['content']) + + +def list_pages(ds): + return ds.query(kind='page').fetch() + + +def list_revisions(ds, page): + page_key = path_to_key(ds, '{}.page'.format(page)) + return ds.query(kind='revision', ancestor=page_key).fetch() + + +def main(project_id): + ds = datastore.Client(dataset_id=project_id) + + save_page(ds, 'page1', '1') + save_page(ds, 'page1', '2') + save_page(ds, 'page1', '3') + + print('Revisions for page1:') + first_revision = None + for revision in list_revisions(ds, 'page1'): + if not first_revision: + first_revision = revision + print("{}: {}".format(revision.key.name, revision['content'])) + + print('restoring revision {}:'.format(first_revision.key.name)) 
+ restore_revision(ds, 'page1', first_revision) + + print('Revisions for page1:') + for revision in list_revisions(ds, 'page1'): + print("{}: {}".format(revision.key.name, revision['content'])) + + print('Cleaning up') + ds.delete_multi([path_to_key(ds, 'page1.page')]) + ds.delete_multi([x.key for x in list_revisions(ds, 'page1')]) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description='Demonstrates wiki data model.') + parser.add_argument('project_id', help='Your cloud project ID.') + + args = parser.parse_args() + + main(args.project_id) diff --git a/tests/__init__.py b/tests/__init__.py index 5351a4ab962d..0b10a41ca0d3 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -21,10 +21,17 @@ import os import StringIO import sys +import tempfile import unittest -from google.appengine.datastore import datastore_stub_util -from google.appengine.ext import testbed +from nose.plugins.skip import SkipTest + +try: + APPENGINE_AVAILABLE = True + from google.appengine.datastore import datastore_stub_util + from google.appengine.ext import testbed +except ImportError: + APPENGINE_AVAILABLE = False BUCKET_NAME_ENV = 'TEST_BUCKET_NAME' PROJECT_ID_ENV = 'TEST_PROJECT_ID' @@ -82,12 +89,16 @@ def setUp(self): self.constants['cloudStorageOutputURI'] % test_bucket_name) def tearDown(self): - os.environ['SERVER_SOFTWARE'] = self._server_software_org + if self._server_software_org: + os.environ['SERVER_SOFTWARE'] = self._server_software_org class DatastoreTestbedCase(unittest.TestCase): """A base test case for common setup/teardown tasks for test.""" def setUp(self): + if not APPENGINE_AVAILABLE: + raise SkipTest() + """Setup the datastore and memcache stub.""" # First, create an instance of the Testbed class. self.testbed = testbed.Testbed() @@ -99,7 +110,9 @@ def setUp(self): self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy( probability=0) # Initialize the datastore stub with this policy. 
- self.testbed.init_datastore_v3_stub(consistency_policy=self.policy) + self.testbed.init_datastore_v3_stub( + datastore_file=tempfile.mkstemp()[1], + consistency_policy=self.policy) self.testbed.init_memcache_stub() def tearDown(self): diff --git a/tox.ini b/tox.ini index deab332300aa..e4393871aec5 100644 --- a/tox.ini +++ b/tox.ini @@ -5,16 +5,51 @@ envlist = py27, pep8, cover [testenv] passenv = PYTHONPATH GOOGLE_* GCLOUD_* TEST_* TRAVIS* basepython = python2.7 - -[testenv:py27] deps = + pyyaml google-api-python-client flask mock nose + coverage + nose-exclude + nosexcover +coverargs = + --with-xunit + --with-xcoverage + --cover-tests + --cover-branches + --cover-inclusive + +[testenv:py27] +deps = + {[testenv]deps} + gcloud +commands = + nosetests \ + --exclude-dir=bigquery/tests/appengine \ + --exclude-dir=bigquery/samples/appengine_auth \ + --exclude-dir=appengine \ + --exclude-dir=datastore/ndb \ + --exclude-dir=localtesting \ + {[testenv]coverargs} \ + {posargs} + +[testenv:gae] +deps = + {[testenv]deps} nosegae commands = - nosetests --with-gae {posargs} + nosetests --with-gae \ + --logging-level=INFO \ + appengine \ + datastore/ndb \ + localtesting \ + bigquery/tests/appengine/test_appengine_auth.py \ + {[testenv]coverargs} \ + {posargs} +setenv = + PYTHONPATH={env:GAE_PYTHONPATH:} [testenv:pep8] deps = @@ -22,15 +57,3 @@ deps = flake8-import-order commands = flake8 --max-complexity=10 --import-order-style=google {posargs} - -[testenv:cover] -deps = - {[testenv:py27]deps} - coverage - coveralls - nosexcover -commands = - nosetests --with-gae --with-xunit --with-xcoverage \ - --cover-tests --cover-branches --cover-min-percentage=70 \ - --cover-inclusive --cover-erase - coveralls