Skip to content

Commit

Permalink
Implement Key.compare_to_proto to check pb keys against existing.
Browse files Browse the repository at this point in the history
Addresses sixth part of googleapis#451.
  • Loading branch information
dhermes committed Dec 23, 2014
1 parent 64ab5ee commit 031b864
Show file tree
Hide file tree
Showing 4 changed files with 173 additions and 27 deletions.
26 changes: 5 additions & 21 deletions gcloud/datastore/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ class NoDataset(RuntimeError):
"""Exception raised by Entity methods which require a dataset."""


class Entity(object):
class Entity(_implicit_environ._DatastoreBase):
"""Entities are akin to rows in a relational database
An entity storing the actual instance of data.
Expand Down Expand Up @@ -94,9 +94,7 @@ class Entity(object):
"""

def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
# Does not inherit from object, so we don't use
# _implicit_environ._DatastoreBase to avoid split MRO.
self._dataset = dataset or _implicit_environ.DATASET
super(Entity, self).__init__(dataset=dataset)
self._data = {}
if kind:
self._key = Key(kind)
Expand Down Expand Up @@ -286,7 +284,7 @@ def save(self):
key_pb = connection.save_entity(
dataset_id=dataset.id(),
key_pb=key.to_protobuf(),
properties=self._data,
properties=self.to_dict(),
exclude_from_indexes=self.exclude_from_indexes())

# If we are in a transaction and the current entity needs an
Expand All @@ -296,22 +294,8 @@ def save(self):
transaction.add_auto_id_entity(self)

if isinstance(key_pb, datastore_pb.Key):
# Update the path (which may have been altered).
# NOTE: The underlying namespace can't have changed in a save().
# The value of the dataset ID may have changed from implicit
# (i.e. None, with the ID implied from the dataset.Dataset
# object associated with the Entity/Key), but if it was
# implicit before the save() we leave it as implicit.
path = []
for element in key_pb.path_element:
key_part = {}
for descriptor, value in element._fields.items():
key_part[descriptor.name] = value
path.append(key_part)
# This is temporary. Will be addressed throughout #451.
clone = key._clone()
clone._path = path
self._key = clone
# Update the key (which may have been altered).
self.key(self.key().compare_to_proto(key_pb))

return self

Expand Down
86 changes: 86 additions & 0 deletions gcloud/datastore/key.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,92 @@ def complete_key(self, id_or_name):
new_key._flat_path += (id_or_name,)
return new_key

def _validate_protobuf_dataset_id(self, protobuf):
"""Checks that dataset ID on protobuf matches current one.
The value of the dataset ID may have changed from unprefixed
(e.g. 'foo') to prefixed (e.g. 's~foo' or 'e~foo').
:type protobuf: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param protobuf: A protobuf representation of the key. Expected to be
returned after a datastore operation.
:rtype: :class:`str`
"""
proto_dataset_id = protobuf.partition_id.dataset_id
if proto_dataset_id == self.dataset_id:
return

# Since they don't match, we check to see if `proto_dataset_id` has a
# prefix.
unprefixed = None
prefix = proto_dataset_id[:2]
if prefix in ('s~', 'e~'):
unprefixed = proto_dataset_id[2:]

if unprefixed != self.dataset_id:
raise ValueError('Dataset ID on protobuf does not match.',
proto_dataset_id, self.dataset_id)

def compare_to_proto(self, protobuf):
    """Check the current key against a protobuf; complete it if partial.

    If the current key is partial and ``protobuf`` carries the missing
    'id' or 'name' in its final path element, a completed key is
    returned; otherwise the current key is returned unchanged.

    The dataset ID is only validated when it is set explicitly on this
    key: an implicit (None) dataset ID is left implicit.

    :type protobuf: :class:`gcloud.datastore.datastore_v1_pb2.Key`
    :param protobuf: A protobuf representation of the key. Expected to be
                     returned after a datastore operation.

    :rtype: :class:`gcloud.datastore.key.Key`
    :returns: The current key, or a completed copy if it was partial.
    :raises: `ValueError` if the namespace or dataset ID of `protobuf`
             don't match the current values or if the path from `protobuf`
             doesn't match.
    """
    # Namespace must agree: unset on the key means it must be unset on
    # the protobuf; otherwise the values must be equal.
    if self.namespace is None:
        if protobuf.partition_id.HasField('namespace'):
            raise ValueError('Namespace unset on key but set on protobuf.')
    elif protobuf.partition_id.namespace != self.namespace:
        raise ValueError('Namespace on protobuf does not match.',
                         protobuf.partition_id.namespace, self.namespace)

    # Only validate the dataset ID when it is explicit on this key.
    if self.dataset_id is not None:
        self._validate_protobuf_dataset_id(protobuf)

    # Convert the protobuf path into the list-of-dicts form used by
    # self.path, keyed by field descriptor name.
    proto_path = [
        {descriptor.name: value
         for descriptor, value in element._fields.items()}
        for element in protobuf.path_element
    ]

    if proto_path == self.path:
        return self

    if not self.is_partial:
        raise ValueError('Proto path does not match completed key.',
                         proto_path, self.path)

    # Pull the completing 'id' (preferred) or 'name' out of the final
    # path element.
    final_element = proto_path[-1]
    id_or_name = None
    for field in ('id', 'name'):
        if field in final_element:
            id_or_name = final_element.pop(field)
            break

    # After removing the completing field, the remaining path must
    # match the partial key's path exactly.
    if proto_path != self.path:
        raise ValueError('Proto path does not match partial key.',
                         proto_path, self.path)

    return self.complete_key(id_or_name)

def to_protobuf(self):
"""Return a protobuf corresponding to the key.
Expand Down
7 changes: 1 addition & 6 deletions gcloud/datastore/test_entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,12 +345,7 @@ def get_entities(self, keys):
return [self.get(key) for key in keys]

def allocate_ids(self, incomplete_key, num_ids):
def clone_with_new_id(key, new_id):
clone = key._clone()
clone._path[-1]['id'] = new_id
return clone
return [clone_with_new_id(incomplete_key, i + 1)
for i in range(num_ids)]
return [incomplete_key.complete_key(i + 1) for i in range(num_ids)]


class _Connection(object):
Expand Down
81 changes: 81 additions & 0 deletions gcloud/datastore/test_key.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,87 @@ def test_complete_key_on_complete(self):
key = self._makeOne('KIND', 1234)
self.assertRaises(ValueError, key.complete_key, 5678)

def test_compare_to_proto_incomplete_w_id(self):
    """A partial key is completed from a protobuf carrying an ID."""
    new_id = 1234
    partial = self._makeOne('KIND')
    pb = partial.to_protobuf()
    pb.path_element[0].id = new_id
    completed = partial.compare_to_proto(pb)
    self.assertFalse(completed is partial)
    self.assertEqual(completed.id, new_id)
    self.assertEqual(completed.name, None)

def test_compare_to_proto_incomplete_w_name(self):
    """A partial key is completed from a protobuf carrying a name."""
    new_name = 'NAME'
    partial = self._makeOne('KIND')
    pb = partial.to_protobuf()
    pb.path_element[0].name = new_name
    completed = partial.compare_to_proto(pb)
    self.assertFalse(completed is partial)
    self.assertEqual(completed.id, None)
    self.assertEqual(completed.name, new_name)

def test_compare_to_proto_incomplete_w_incomplete(self):
    """A partial key compared to its own partial protobuf is unchanged."""
    partial = self._makeOne('KIND')
    result = partial.compare_to_proto(partial.to_protobuf())
    self.assertTrue(result is partial)

def test_compare_to_proto_incomplete_w_bad_path(self):
    """A protobuf whose path disagrees with the key raises ValueError."""
    partial = self._makeOne('KIND1', 1234, 'KIND2')
    pb = partial.to_protobuf()
    pb.path_element[0].kind = 'NO_KIND'
    self.assertRaises(ValueError, partial.compare_to_proto, pb)

def test_compare_to_proto_complete_w_id(self):
    """A complete key rejects a protobuf bearing a different ID."""
    full_key = self._makeOne('KIND', 1234)
    pb = full_key.to_protobuf()
    pb.path_element[0].id = 5678
    self.assertRaises(ValueError, full_key.compare_to_proto, pb)

def test_compare_to_proto_complete_w_name(self):
    """A complete key rejects a protobuf bearing an extra name."""
    full_key = self._makeOne('KIND', 1234)
    pb = full_key.to_protobuf()
    pb.path_element[0].name = 'NAME'
    self.assertRaises(ValueError, full_key.compare_to_proto, pb)

def test_compare_to_proto_complete_w_incomplete(self):
    """A complete key rejects a protobuf whose ID has been cleared."""
    full_key = self._makeOne('KIND', 1234)
    pb = full_key.to_protobuf()
    pb.path_element[0].ClearField('id')
    self.assertRaises(ValueError, full_key.compare_to_proto, pb)

def test_compare_to_proto_complete_diff_dataset(self):
    """A prefixed dataset ID ('s~DATASET') still matches 'DATASET'."""
    full_key = self._makeOne('KIND', 1234, dataset_id='DATASET')
    pb = full_key.to_protobuf()
    pb.partition_id.dataset_id = 's~' + full_key.dataset_id
    self.assertTrue(full_key.compare_to_proto(pb) is full_key)

def test_compare_to_proto_complete_bad_dataset(self):
    """An unrecognized dataset-ID prefix raises ValueError."""
    full_key = self._makeOne('KIND', 1234, dataset_id='DATASET')
    pb = full_key.to_protobuf()
    pb.partition_id.dataset_id = 'BAD_PRE~' + full_key.dataset_id
    self.assertRaises(ValueError, full_key.compare_to_proto, pb)

def test_compare_to_proto_complete_valid_namespace(self):
    """Matching namespaces compare clean and return the same key."""
    full_key = self._makeOne('KIND', 1234, namespace='NAMESPACE')
    pb = full_key.to_protobuf()
    self.assertTrue(full_key.compare_to_proto(pb) is full_key)

def test_compare_to_proto_complete_namespace_unset_on_pb(self):
    """A protobuf missing the key's namespace raises ValueError."""
    full_key = self._makeOne('KIND', 1234, namespace='NAMESPACE')
    pb = full_key.to_protobuf()
    pb.partition_id.ClearField('namespace')
    self.assertRaises(ValueError, full_key.compare_to_proto, pb)

def test_compare_to_proto_complete_namespace_unset_on_key(self):
    """A protobuf with a namespace the key lacks raises ValueError."""
    full_key = self._makeOne('KIND', 1234)
    pb = full_key.to_protobuf()
    pb.partition_id.namespace = 'NAMESPACE'
    self.assertRaises(ValueError, full_key.compare_to_proto, pb)

def test_to_protobuf_defaults(self):
from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB
_KIND = 'KIND'
Expand Down

0 comments on commit 031b864

Please sign in to comment.