gcloud/datastore/api.py: 40 changes (30 additions, 10 deletions)

@@ -166,11 +166,13 @@ def _extended_lookup(connection, dataset_id, key_pbs,
return results


def get(keys, missing=None, deferred=None, connection=None, dataset_id=None):
def get(key_or_keys, missing=None, deferred=None,
connection=None, dataset_id=None):
"""Retrieves entities, along with their attributes.

:type keys: list of :class:`gcloud.datastore.key.Key`
:param keys: The keys to be retrieved from the datastore.
:type key_or_keys: list of :class:`gcloud.datastore.key.Key` or
:class:`gcloud.datastore.key.Key`
:param key_or_keys: The key or keys to be retrieved from the datastore.

:type missing: an empty list or None.
:param missing: If a list is passed, the key-only entities returned
@@ -191,15 +193,22 @@ def get(keys, missing=None, deferred=None, connection=None, dataset_id=None):
If not passed, inferred from the environment.

:rtype: list of :class:`gcloud.datastore.entity.Entity`
:returns: The requested entities.
:returns: The requested entities (or a single entity if a single key is
passed).
:raises: EnvironmentError if ``connection`` or ``dataset_id`` not passed,
and cannot be inferred from the environment. ValueError if
one or more of ``keys`` has a dataset ID which does not match
the passed / inferred dataset ID.
"""
if not keys:
if not key_or_keys:
return []

single_key = not isinstance(key_or_keys, list)

if single_key:
keys = [key_or_keys]
else:
keys = key_or_keys

connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id, keys[0])

@@ -231,7 +240,12 @@ def get(keys, missing=None, deferred=None, connection=None, dataset_id=None):
for entity_pb in entity_pbs:
entities.append(helpers.entity_from_protobuf(entity_pb))

return entities
if not single_key:
return entities
else:
if entities:
# Assumes a single key will result in at most 1 entity.
return entities[0]


def put(entities, connection=None, dataset_id=None):
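
A minimal usage sketch of the new get() behaviour, assuming a connection can be inferred from the environment; the kind, IDs, and dataset ID below are illustrative:

from gcloud.datastore import api
from gcloud.datastore.key import Key

DATASET_ID = 'my-dataset-id'   # illustrative
key = Key('Kind', 1234, dataset_id=DATASET_ID)

# Single key in -> single entity out (or None when nothing is found).
entity = api.get(key, dataset_id=DATASET_ID)

# List of keys in -> list of entities out, exactly as before.
entities = api.get([key, Key('Kind', 5678, dataset_id=DATASET_ID)],
                   dataset_id=DATASET_ID)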
@@ -272,11 +286,12 @@ def put(entities, connection=None, dataset_id=None):
current.commit()


def delete(keys, connection=None, dataset_id=None):
def delete(key_or_keys, connection=None, dataset_id=None):
"""Delete the keys in the Cloud Datastore.

:type keys: list of :class:`gcloud.datastore.key.Key`
:param keys: The keys to be deleted from the datastore.
:type key_or_keys: list of :class:`gcloud.datastore.key.Key` or
:class:`gcloud.datastore.key.Key`
:param key_or_keys: The key or keys to be deleted from the datastore.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional connection used to connect to datastore.
@@ -291,9 +306,14 @@ def delete(keys, connection=None, dataset_id=None):
one or more keys has a dataset ID not matching the passed /
inferred dataset ID.
"""
if not keys:
if not key_or_keys:
return

if isinstance(key_or_keys, list):
keys = key_or_keys
else:
keys = [key_or_keys]

connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id, keys[0])

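The corresponding sketch for delete(), under the same assumptions (a single key or a list of keys; connection and dataset ID inferred from the environment or passed explicitly):

from gcloud.datastore import api
from gcloud.datastore.key import Key

key = Key('Kind', 1234, dataset_id='my-dataset-id')

api.delete(key)     # single key is wrapped in a list internally
api.delete([key])   # list form, unchanged behaviour
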
gcloud/datastore/test_api.py: 56 changes (56 additions, 0 deletions)

@@ -213,6 +213,17 @@ def test_miss(self):
dataset_id=DATASET_ID)
self.assertEqual(results, [])

def test_miss_single_key(self):
from gcloud.datastore.key import Key
from gcloud.datastore.test_connection import _Connection

DATASET_ID = 'DATASET'
connection = _Connection()
key = Key('Kind', 1234, dataset_id=DATASET_ID)
result = self._callFUT(key, connection=connection,
dataset_id=DATASET_ID)
self.assertTrue(result is None)

def test_miss_wo_dataset_id(self):
from gcloud.datastore.key import Key
from gcloud.datastore.test_connection import _Connection
@@ -416,6 +427,34 @@ def test_hit(self):
self.assertEqual(list(result), ['foo'])
self.assertEqual(result['foo'], 'Foo')

def test_hit_single_key(self):
from gcloud.datastore.key import Key
from gcloud.datastore.test_connection import _Connection

DATASET_ID = 'DATASET'
KIND = 'Kind'
ID = 1234
PATH = [{'kind': KIND, 'id': ID}]

# Make a found entity pb to be returned from mock backend.
entity_pb = self._make_entity_pb(DATASET_ID, KIND, ID,
'foo', 'Foo')

# Make a connection to return the entity pb.
connection = _Connection(entity_pb)

key = Key(KIND, ID, dataset_id=DATASET_ID)
result = self._callFUT(key, connection=connection,
dataset_id=DATASET_ID)
new_key = result.key

# Check the returned value is as expected.
self.assertFalse(new_key is key)
self.assertEqual(new_key.dataset_id, DATASET_ID)
self.assertEqual(new_key.path, PATH)
self.assertEqual(list(result), ['foo'])
self.assertEqual(result['foo'], 'Foo')

def test_hit_multiple_keys_same_dataset(self):
from gcloud.datastore.key import Key
from gcloud.datastore.test_connection import _Connection
@@ -781,6 +820,23 @@ def test_no_batch(self):
self.assertEqual(dataset_id, _DATASET)
self.assertEqual(list(mutation.delete), [key.to_protobuf()])

def test_no_batch_single_key(self):
from gcloud.datastore.test_batch import _Connection
from gcloud.datastore.test_batch import _Key

# Build basic mocks needed to delete.
_DATASET = 'DATASET'
connection = _Connection()
key = _Key(_DATASET)

result = self._callFUT(key, connection=connection,
dataset_id=_DATASET)
self.assertEqual(result, None)
self.assertEqual(len(connection._committed), 1)
dataset_id, mutation = connection._committed[0]
self.assertEqual(dataset_id, _DATASET)
self.assertEqual(list(mutation.delete), [key.to_protobuf()])

def test_wo_batch_w_key_different_than_default_dataset_id(self):
from gcloud.datastore._testing import _monkey_defaults
from gcloud.datastore.test_batch import _Connection