Commit

Re-writing Entity to subclass object instead of dict.
dhermes committed Dec 19, 2014
1 parent 5723288 commit 0d79df0
Showing 6 changed files with 127 additions and 34 deletions.
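In short, this commit drops Entity's dict inheritance: properties now live in an internal _data dict reached through item access and the new to_dict(), update_properties(), and clear_properties() methods, and callers elsewhere in the tree are updated to match. A quick, hedged sketch of the caller-facing result (assumes gcloud is importable; 'Person' is just an example kind):

    from gcloud.datastore.entity import Entity

    entity = Entity(kind='Person')        # no dataset needed for local edits

    # Item access still reads like a dictionary ...
    entity['name'] = 'JJ'

    # ... but bulk operations now go through explicit methods.
    entity.update_properties({'age': 20})

    print(entity.to_dict())               # {'age': 20, 'name': 'JJ'} (a copy of the stored data)
    print(isinstance(entity, dict))       # False; dict(entity) no longer applies

    entity.clear_properties()
    print(entity.to_dict())               # {}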
gcloud/datastore/entity.py (67 changes: 54 additions & 13 deletions)
@@ -26,7 +26,7 @@ class NoDataset(RuntimeError):
"""Exception raised by Entity methods which require a dataset."""


class Entity(dict):
class Entity(object):
"""Entities are akin to rows in a relational database
An entity storing the actual instance of data.
@@ -41,9 +41,9 @@ class Entity(dict):
Entities in this API act like dictionaries with extras built in that
allow you to delete or persist the data stored on the entity.
Entities are mutable and act like a subclass of a dictionary.
This means you could take an existing entity and change the key
to duplicate the object.
Entities are mutable and properties can be set, updated and deleted
like keys in a dictionary. This means you could take an existing entity
and change the key to duplicate the object.
Use :func:`gcloud.datastore.dataset.Dataset.get_entity`
to retrieve an existing entity.
@@ -59,10 +59,9 @@ class Entity(dict):
>>> entity
<Entity[{'kind': 'EntityKind', id: 1234}] {'age': 20, 'name': 'JJ'}>
And you can convert an entity to a regular Python dictionary with the
`dict` builtin:
And you can convert an entity to a regular Python dictionary:
>>> dict(entity)
>>> entity.to_dict()
{'age': 20, 'name': 'JJ'}
.. note::
@@ -94,14 +93,57 @@ class Entity(dict):
"""

def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
super(Entity, self).__init__()
self._dataset = dataset
self._data = {}
if kind:
self._key = Key().kind(kind)
else:
self._key = None
self._exclude_from_indexes = set(exclude_from_indexes)

def __getitem__(self, item_name):
return self._data[item_name]

def __setitem__(self, item_name, value):
self._data[item_name] = value

def __delitem__(self, item_name):
del self._data[item_name]

def clear_properties(self):
"""Clear all properties from the Entity."""
self._data.clear()

def update_properties(self, *args, **kwargs):
"""Allows entity properties to be updated in bulk.
Either takes a single dictionary or uses the keywords passed in.
>>> entity
<Entity[{'kind': 'Foo', 'id': 1}] {}>
>>> entity.update_properties(prop1=u'bar', prop2=u'baz')
>>> entity
<Entity[{'kind': 'Foo', 'id': 1}] {'prop1': u'bar', 'prop2': u'baz'}>
>>> entity.update_properties({'prop1': 0, 'prop2': 1})
>>> entity
<Entity[{'kind': 'Foo', 'id': 1}] {'prop1': 0, 'prop2': 1}>
:raises: `TypeError` if a mix of positional and keyword arguments is
used or if more than one positional argument is used.
"""
if args and kwargs or len(args) > 1:
raise TypeError('Only a single dictionary or keyword arguments '
'may be used')
if args:
dict_arg, = args
self._data.update(dict_arg)
else:
self._data.update(kwargs)

def to_dict(self):
"""Converts the stored properties to a dictionary."""
return self._data.copy()

def dataset(self):
"""Get the :class:`.dataset.Dataset` in which this entity belongs.
@@ -215,7 +257,7 @@ def reload(self):
entity = dataset.get_entity(key.to_protobuf())

if entity:
self.update(entity)
self.update_properties(entity.to_dict())
return self

def save(self):
@@ -241,7 +283,7 @@ def save(self):
key_pb = connection.save_entity(
dataset_id=dataset.id(),
key_pb=key.to_protobuf(),
properties=dict(self),
properties=self._data,
exclude_from_indexes=self.exclude_from_indexes())

# If we are in a transaction and the current entity needs an
@@ -284,7 +326,6 @@ def delete(self):

def __repr__(self):
if self._key:
return '<Entity%s %s>' % (self._key.path(),
super(Entity, self).__repr__())
return '<Entity%s %r>' % (self._key.path(), self._data)
else:
return '<Entity %s>' % (super(Entity, self).__repr__())
return '<Entity %r>' % (self._data,)
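One detail of the new update_properties() worth calling out: it accepts either a single positional dict or keyword arguments, never both and never more than one dict, and raises TypeError otherwise; the new tests in test_entity.py further down exercise exactly this contract. A short sketch of the three cases (again assuming gcloud is importable):

    from gcloud.datastore.entity import Entity

    entity = Entity(kind='Person')

    entity.update_properties({'prop1': 0, 'prop2': 1})    # one dict: fine
    entity.update_properties(prop1=10, prop2=11)           # keywords: fine, overwrites

    try:
        entity.update_properties({'foo': 'bar'}, {'baz': 'zip'})   # two dicts: rejected
    except TypeError as exc:
        print(exc)   # Only a single dictionary or keyword arguments may be used

    print(entity.to_dict())   # {'prop1': 10, 'prop2': 11}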
gcloud/datastore/helpers.py (2 changes: 1 addition & 1 deletion)
@@ -252,7 +252,7 @@ def _set_protobuf_value(value_pb, val):
key = val.key()
if key is not None:
e_pb.key.CopyFrom(key.to_protobuf())
for item_key, value in val.items():
for item_key, value in val.to_dict().items():
p_pb = e_pb.property.add()
p_pb.name = item_key
_set_protobuf_value(p_pb.value, value)
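Because Entity is no longer a mapping, the helper above walks the snapshot returned by to_dict() rather than the entity itself; the test and regression changes below make the same switch. A minimal, illustrative sketch of that read path (not the actual protobuf code):

    from gcloud.datastore.entity import Entity

    entity = Entity(kind='Person')                 # example kind
    entity.update_properties(name='Arya', family='Stark')

    snapshot = entity.to_dict()                    # a copy of the internal _data dict
    for prop_name, prop_value in sorted(snapshot.items()):
        print('%s -> %s' % (prop_name, prop_value))

    snapshot.clear()                               # only the copy is affected
    print(entity.to_dict())                        # still holds both properties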
gcloud/datastore/test_dataset.py (6 changes: 3 additions & 3 deletions)
@@ -106,7 +106,7 @@ def test_get_entity_hit(self):
key = result.key()
self.assertEqual(key._dataset_id, DATASET_ID)
self.assertEqual(key.path(), PATH)
self.assertEqual(list(result), ['foo'])
self.assertEqual(result.to_dict().keys(), ['foo'])
self.assertEqual(result['foo'], 'Foo')

def test_get_entity_path(self):
@@ -129,7 +129,7 @@ def test_get_entity_path(self):
key = result.key()
self.assertEqual(key._dataset_id, DATASET_ID)
self.assertEqual(key.path(), PATH)
self.assertEqual(list(result), ['foo'])
self.assertEqual(result.to_dict().keys(), ['foo'])
self.assertEqual(result['foo'], 'Foo')

def test_get_entity_odd_nonetype(self):
Expand Down Expand Up @@ -210,7 +210,7 @@ def test_get_entities_hit(self):
key = result.key()
self.assertEqual(key._dataset_id, DATASET_ID)
self.assertEqual(key.path(), PATH)
self.assertEqual(list(result), ['foo'])
self.assertEqual(result.to_dict().keys(), ['foo'])
self.assertEqual(result['foo'], 'Foo')

def test_allocate_ids(self):
gcloud/datastore/test_entity.py (60 changes: 59 additions & 1 deletion)
@@ -68,6 +68,59 @@ def test_key_setter(self):
entity.key(key)
self.assertTrue(entity.key() is key)

def test___delitem__exists(self):
entity = self._makeOne()
entity['foo'] = 'bar'
# This will cause an error (not a failure) if it doesn't work.
# Can't use a try-except because coverage.py doesn't like a branch
# which never occurs.
del entity['foo']

def test___delitem__not_exist(self):
entity = self._makeOne()
fail_occurred = False
try:
del entity['foo']
except KeyError:
fail_occurred = True
self.assertTrue(fail_occurred)

def test_clear_properties(self):
entity = self._makeOne()
entity['foo'] = 0
entity['bar'] = 1
self.assertEqual(entity.to_dict(), {'foo': 0, 'bar': 1})

entity.clear_properties()
self.assertEqual(entity.to_dict(), {})

def test_update_properties_dict(self):
entity = self._makeOne()
self.assertEqual(entity.to_dict(), {})

NEW_VALUES = {'prop1': 0, 'prop2': 1}
entity.update_properties(NEW_VALUES)
self.assertEqual(entity.to_dict(), NEW_VALUES)

def test_update_properties_keywords(self):
entity = self._makeOne()
self.assertEqual(entity.to_dict(), {})

NEW_VALUES = {'prop1': 0, 'prop2': 1}
entity.update_properties(**NEW_VALUES)
self.assertEqual(entity.to_dict(), NEW_VALUES)

entity.update_properties(prop1=10, prop2=11)
NEW_VALUES_AGAIN = {'prop1': 10, 'prop2': 11}
self.assertEqual(entity.to_dict(), NEW_VALUES_AGAIN)

def test_update_properties_invalid(self):
entity = self._makeOne()

dict1 = {'foo': 'bar'}
dict2 = {'baz': 'zip'}
self.assertRaises(TypeError, entity.update_properties, dict1, dict2)

def test_from_key_wo_dataset(self):
from gcloud.datastore.key import Key

@@ -125,8 +178,13 @@ def test_reload_miss(self):

def test_reload_hit(self):
dataset = _Dataset()
dataset['KEY'] = {'foo': 'Bar'}

fake_entity = self._makeOne(dataset=dataset)
fake_entity['foo'] = 'Bar'

key = _Key()
dataset[key._key] = fake_entity

entity = self._makeOne(dataset)
entity.key(key)
entity['foo'] = 'Foo'
regression/datastore.py (24 changes: 9 additions & 15 deletions)
@@ -67,7 +67,7 @@ def _get_post(self, name=None, key_id=None, post_content=None):
}
# Create an entity with the given content in our dataset.
entity = self.dataset.entity(kind='Post')
entity.update(post_content)
entity.update_properties(post_content)

# Update the entity key.
key = None
@@ -98,9 +98,7 @@ def _generic_test_post(self, name=None, key_id=None):
entity.key().namespace())

# Check the data is the same.
retrieved_dict = dict(retrieved_entity.items())
entity_dict = dict(entity.items())
self.assertEqual(retrieved_dict, entity_dict)
self.assertEqual(retrieved_entity.to_dict(), entity.to_dict())

def test_post_with_name(self):
self._generic_test_post(name='post1')
@@ -249,17 +247,15 @@ def test_projection_query(self):
self.assertEqual(len(entities), expected_matches)

arya_entity = entities[0]
arya_dict = dict(arya_entity.items())
self.assertEqual(arya_dict, {'name': 'Arya', 'family': 'Stark'})
self.assertEqual(arya_entity.to_dict(),
{'name': 'Arya', 'family': 'Stark'})

catelyn_stark_entity = entities[2]
catelyn_stark_dict = dict(catelyn_stark_entity.items())
self.assertEqual(catelyn_stark_dict,
self.assertEqual(catelyn_stark_entity.to_dict(),
{'name': 'Catelyn', 'family': 'Stark'})

catelyn_tully_entity = entities[3]
catelyn_tully_dict = dict(catelyn_tully_entity.items())
self.assertEqual(catelyn_tully_dict,
self.assertEqual(catelyn_tully_entity.to_dict(),
{'name': 'Catelyn', 'family': 'Tully'})

# Check both Catelyn keys are the same.
@@ -273,8 +269,8 @@ def test_projection_query(self):
catelyn_tully_key._dataset_id)

sansa_entity = entities[8]
sansa_dict = dict(sansa_entity.items())
self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'})
self.assertEqual(sansa_entity.to_dict(),
{'name': 'Sansa', 'family': 'Stark'})

def test_query_paginate_with_offset(self):
query = self._base_query()
@@ -346,7 +342,5 @@ def test_transaction(self):

# This will always return after the transaction.
retrieved_entity = self.dataset.get_entity(key)
retrieved_dict = dict(retrieved_entity.items())
entity_dict = dict(entity.items())
self.assertEqual(retrieved_dict, entity_dict)
self.assertEqual(retrieved_entity.to_dict(), entity.to_dict())
retrieved_entity.delete()
regression/populate_datastore.py (2 changes: 1 addition & 1 deletion)
@@ -95,7 +95,7 @@ def add_characters():
key_path, character))
key = datastore.key.Key(path=key_path)
entity = datastore.entity.Entity(dataset=dataset).key(key)
entity.update(character)
entity.update_properties(character)
entity.save()
print('Adding Character %s %s' % (character['name'],
character['family']))