From 690b5d9d4dee5195872e87089e4f0ffe3963e810 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 13:31:04 -0500 Subject: [PATCH 1/7] Cherry pick cleanups from #499. - Rework '_require_dataset' -> '_require_dataset_id'. Also, make both it and '_require_connection' take the passed object, and test explicitly against None. - Docstring tweaks to reflect API reshapings. - Clarify implicit environ tests. - Clarify dependency on implicit environ in key tests. --- gcloud/datastore/__init__.py | 48 +++++--- gcloud/datastore/connection.py | 34 +++--- gcloud/datastore/entity.py | 5 +- gcloud/datastore/key.py | 43 ++++--- gcloud/datastore/test___init__.py | 183 ++++++++++++++++++------------ gcloud/datastore/test_key.py | 158 +++++++++++++++++--------- 6 files changed, 292 insertions(+), 179 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index fca5f10e4270..9edf1b5230ea 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -104,28 +104,40 @@ def get_connection(): return Connection(credentials=scoped_credentials) -def _require_dataset_id(): - """Convenience method to ensure DATASET_ID is set. +def _require_dataset_id(dataset_id=None): + """Infer a dataset ID from the environment, if not passed explicitly. - :rtype: :class:`str` - :returns: A dataset ID based on the current environment. - :raises: :class:`EnvironmentError` if DATASET_ID is not set. + :type dataset_id: :class:`str`. + :param dataset_id: Optional. + + :rtype: :class:`gcloud.datastore.dataset.Dataset` + :returns: A dataset based on the current environment. + :raises: :class:`EnvironmentError` if ``dataset_id`` is None, + and cannot be inferred from the environment. 
""" - if _implicit_environ.DATASET_ID is None: - raise EnvironmentError('Dataset ID could not be inferred.') - return _implicit_environ.DATASET_ID + if dataset_id is None: + if _implicit_environ.DATASET_ID is None: + raise EnvironmentError('Dataset ID could not be inferred.') + dataset_id = _implicit_environ.DATASET_ID + return dataset_id -def _require_connection(): - """Convenience method to ensure CONNECTION is set. +def _require_connection(connection=None): + """Infer a connection from the environment, if not passed explicitly. + + :type connection: :class:`gcloud.datastore.connection.Connection` + :param connection: Optional. :rtype: :class:`gcloud.datastore.connection.Connection` :returns: A connection based on the current environment. - :raises: :class:`EnvironmentError` if CONNECTION is not set. + :raises: :class:`EnvironmentError` if ``connection`` is None, and + cannot be inferred from the environment. """ - if _implicit_environ.CONNECTION is None: - raise EnvironmentError('Connection could not be inferred.') - return _implicit_environ.CONNECTION + if connection is None: + if _implicit_environ.CONNECTION is None: + raise EnvironmentError('Connection could not be inferred.') + connection = _implicit_environ.CONNECTION + return connection def get_entities(keys, missing=None, deferred=None, @@ -154,8 +166,8 @@ def get_entities(keys, missing=None, deferred=None, :rtype: list of :class:`gcloud.datastore.entity.Entity` :returns: The requested entities. """ - connection = connection or _require_connection() - dataset_id = dataset_id or _require_dataset_id() + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id) entity_pbs = connection.lookup( dataset_id=dataset_id, @@ -199,8 +211,8 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None): :returns: The (complete) keys allocated with `incomplete_key` as root. :raises: `ValueError` if `incomplete_key` is not a partial key. 
""" - connection = connection or _require_connection() - dataset_id = dataset_id or _require_dataset_id() + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id) if not incomplete_key.is_partial: raise ValueError(('Key is not partial.', incomplete_key)) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index eeacc44ba35c..8b6d18f22926 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -163,17 +163,13 @@ def lookup(self, dataset_id, key_pbs, (:class:`gcloud.datastore.datastore_v1_pb2.Key` and :class:`gcloud.datastore.datastore_v1_pb2.Entity`) and is used under the hood for methods like - :func:`gcloud.datastore.dataset.Dataset.get_entity`: + :func:`gcloud.datastore.key.Key.get`: >>> from gcloud import datastore >>> from gcloud.datastore.key import Key >>> connection = datastore.get_connection() - >>> dataset = connection.dataset('dataset-id') - >>> key = Key(dataset=dataset).kind('MyKind').id(1234) - - Using the :class:`gcloud.datastore.dataset.Dataset` helper: - - >>> dataset.get_entity(key) + >>> key = Key('MyKind', 1234, dataset_id='dataset-id') + >>> key.get() Using the ``connection`` class directly: @@ -182,7 +178,7 @@ def lookup(self, dataset_id, key_pbs, :type dataset_id: string - :param dataset_id: The dataset to look up the keys. + :param dataset_id: The ID of the dataset to look up the keys. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` (or a single Key) @@ -262,12 +258,12 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False): uses this method to fetch data: >>> from gcloud import datastore + >>> from gcloud.datastore.query import Query >>> connection = datastore.get_connection() - >>> dataset = connection.dataset('dataset-id') - >>> query = dataset.query().kind('MyKind').filter( - ... 
'property', '=', 'val') + >>> query = Query(dataset_id='dataset-id', 'MyKind') + >>> query.add_filter('property', '=', 'val') - Using the `fetch`` method... + Using the query's ``fetch_page`` method... >>> entities, cursor, more_results = query.fetch_page() >>> entities @@ -319,7 +315,7 @@ def begin_transaction(self, dataset_id, serializable=False): Maps the ``DatastoreService.BeginTransaction`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset over which to execute the transaction. + :param dataset_id: The ID dataset to which the transaction applies. """ if self.transaction(): @@ -346,7 +342,7 @@ def commit(self, dataset_id, mutation_pb): Maps the ``DatastoreService.Commit`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset in which to perform the changes. + :param dataset_id: The ID dataset to which the transaction applies. :type mutation_pb: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`. :param mutation_pb: The protobuf for the mutations being saved. @@ -376,7 +372,8 @@ def rollback(self, dataset_id): if the connection isn't currently in a transaction. :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. + :param dataset_id: The id of the dataset to which the transaction + belongs. """ if not self.transaction() or not self.transaction().id: raise ValueError('No transaction to rollback.') @@ -393,7 +390,8 @@ def allocate_ids(self, dataset_id, key_pbs): Maps the ``DatastoreService.AllocateIds`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. + :param dataset_id: The id of the dataset to which the transaction + belongs. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pbs: The keys for which the backend should allocate IDs. @@ -418,7 +416,7 @@ def save_entity(self, dataset_id, key_pb, properties, not passed in 'properties' no longer be set for the entity. 
:type dataset_id: string - :param dataset_id: The dataset in which to save the entity. + :param dataset_id: The id of the dataset in which to save the entity. :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pb: The complete or partial key for the entity. @@ -490,7 +488,7 @@ def delete_entities(self, dataset_id, key_pbs): :func:`gcloud.datastore.entity.Entity.delete` method. :type dataset_id: string - :param dataset_id: The dataset from which to delete the keys. + :param dataset_id: The ID of the dataset from which to delete the keys. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pbs: The keys to delete from the datastore. diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 479f136f43e9..47efe59639fa 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -40,10 +40,9 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - Use :func:`gcloud.datastore.dataset.Dataset.get_entity` - to retrieve an existing entity. + Use :func:`gcloud.datastore.key.Key.get` to retrieve an existing entity. - >>> dataset.get_entity(key) + >>> key.get() You can the set values on the entity just like you would on any diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 074fa3246cae..300db3b3cff9 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -66,27 +66,14 @@ def __init__(self, *path_args, **kwargs): keyword argument. """ self._flat_path = path_args - self._parent = kwargs.get('parent') + parent = self._parent = kwargs.get('parent') self._namespace = kwargs.get('namespace') - self._dataset_id = kwargs.get('dataset_id') + dataset_id = kwargs.get('dataset_id') + self._dataset_id = _validate_dataset_id(dataset_id, parent) # _flat_path, _parent, _namespace and _dataset_id must be set before # _combine_args() is called. 
self._path = self._combine_args() - self._validate_dataset_id() - def _validate_dataset_id(self): - """Ensures the dataset ID is set. - - If unset, attempts to imply the ID from the environment. - - :raises: `ValueError` if there is no `dataset_id` and none - can be implied. - """ - if self._dataset_id is None: - if _implicit_environ.DATASET_ID is not None: - self._dataset_id = _implicit_environ.DATASET_ID - else: - raise ValueError('A Key must have a dataset ID set.') @staticmethod def _parse_path(path_args): @@ -344,7 +331,7 @@ def dataset_id(self): """Dataset ID getter. :rtype: :class:`str` - :returns: The key's dataset. + :returns: The key's dataset ID. """ return self._dataset_id @@ -383,3 +370,25 @@ def parent(self): def __repr__(self): return '' % (self.path, self.dataset_id) + + +def _validate_dataset_id(dataset_id, parent): + """Ensure the dataset ID is set appropriately. + + If ``parent`` is passed, skip the test (it will be checked / fixed up + later). + + If ``dataset_id`` is unset, attempt to infer the ID from the environment. + + :raises: `ValueError` if ``dataset_id`` is None and none can be inferred. 
+ """ + if parent is None: + + if dataset_id is None: + + if _implicit_environ.DATASET_ID is None: + raise ValueError("A Key must have a dataset ID set.") + + dataset_id = _implicit_environ.DATASET_ID + + return dataset_id diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 575665f5228c..11f209374464 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -15,26 +15,6 @@ import unittest2 -class Test_get_connection(unittest2.TestCase): - - def _callFUT(self): - from gcloud.datastore import get_connection - return get_connection() - - def test_it(self): - from gcloud import credentials - from gcloud.datastore.connection import Connection - from gcloud.test_credentials import _Client - from gcloud._testing import _Monkey - - client = _Client() - with _Monkey(credentials, client=client): - found = self._callFUT() - self.assertTrue(isinstance(found, Connection)) - self.assertTrue(found._credentials is client._signed) - self.assertTrue(client._get_app_default_called) - - class Test_set_default_dataset_id(unittest2.TestCase): def setUp(self): @@ -50,44 +30,53 @@ def _callFUT(self, dataset_id=None): from gcloud.datastore import set_default_dataset_id return set_default_dataset_id(dataset_id=dataset_id) - def _test_with_environ(self, environ, expected_result, dataset_id=None): + def _monkey(self, implicit_dataset_id): import os + from gcloud.datastore import _DATASET_ENV_VAR_NAME from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ + environ = {_DATASET_ENV_VAR_NAME: implicit_dataset_id} + return _Monkey(os, getenv=environ.get) - # Check the environment is unset. 
+ def test_no_env_var_set(self): + from gcloud.datastore import _implicit_environ + with self._monkey(None): + self._callFUT() self.assertEqual(_implicit_environ.DATASET_ID, None) - def custom_getenv(key): - return environ.get(key) - - with _Monkey(os, getenv=custom_getenv): - self._callFUT(dataset_id=dataset_id) - - self.assertEqual(_implicit_environ.DATASET_ID, expected_result) - def test_set_from_env_var(self): - from gcloud.datastore import _DATASET_ENV_VAR_NAME from gcloud.datastore import _implicit_environ + IMPLICIT_DATASET_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT() + self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID) - # Make a custom getenv function to Monkey. - DATASET = 'dataset' - VALUES = { - _DATASET_ENV_VAR_NAME: DATASET, - } - self._test_with_environ(VALUES, DATASET) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET) + def test_set_explicit_w_env_var_set(self): + from gcloud.datastore import _implicit_environ + EXPLICIT_DATASET_ID = 'EXPLICIT' + with self._monkey(None): + self._callFUT(EXPLICIT_DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID) - def test_no_env_var_set(self): + def test_set_explicit_no_env_var_set(self): from gcloud.datastore import _implicit_environ - self._test_with_environ({}, None) + IMPLICIT_DATASET_ID = 'IMPLICIT' + EXPLICIT_DATASET_ID = 'EXPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT(EXPLICIT_DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID) + + def test_set_explicit_None_wo_env_var_set(self): + from gcloud.datastore import _implicit_environ + with self._monkey(None): + self._callFUT(None) self.assertEqual(_implicit_environ.DATASET_ID, None) - def test_set_explicit(self): + def test_set_explicit_None_w_env_var_set(self): from gcloud.datastore import _implicit_environ - DATASET_ID = 'DATASET' - self._test_with_environ({}, DATASET_ID, dataset_id=DATASET_ID) - 
self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) + IMPLICIT_DATASET_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT(None) + self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID) class Test_set_default_connection(unittest2.TestCase): @@ -127,45 +116,99 @@ def test_set_implicit(self): self.assertEqual(_implicit_environ.CONNECTION, fake_cnxn) -class Test_implicit_behavior(unittest2.TestCase): +class Test_get_connection(unittest2.TestCase): - def test__require_dataset_id_value_unset(self): - import gcloud.datastore - from gcloud.datastore import _implicit_environ + def _callFUT(self): + from gcloud.datastore import get_connection + return get_connection() + + def test_it(self): + from gcloud import credentials + from gcloud.datastore.connection import Connection + from gcloud.test_credentials import _Client from gcloud._testing import _Monkey - with _Monkey(_implicit_environ, DATASET_ID=None): - with self.assertRaises(EnvironmentError): - gcloud.datastore._require_dataset_id() + client = _Client() + with _Monkey(credentials, client=client): + found = self._callFUT() + self.assertTrue(isinstance(found, Connection)) + self.assertTrue(found._credentials is client._signed) + self.assertTrue(client._get_app_default_called) - def test__require_dataset_id_value_set(self): - import gcloud.datastore - from gcloud.datastore import _implicit_environ - from gcloud._testing import _Monkey - FAKE_DATASET_ID = object() - with _Monkey(_implicit_environ, DATASET_ID=FAKE_DATASET_ID): - stored_dataset_id = gcloud.datastore._require_dataset_id() - self.assertTrue(stored_dataset_id is FAKE_DATASET_ID) +class Test__require_dataset_id(unittest2.TestCase): + + _MARKER = object() + + def _callFUT(self, passed=_MARKER): + from gcloud.datastore import _require_dataset_id + if passed is self._MARKER: + return _require_dataset_id() + return _require_dataset_id(passed) - def test__require_connection_value_unset(self): - import gcloud.datastore + def 
_monkey(self, dataset_id): from gcloud.datastore import _implicit_environ from gcloud._testing import _Monkey + return _Monkey(_implicit_environ, DATASET_ID=dataset_id) - with _Monkey(_implicit_environ, CONNECTION=None): + def test__require_dataset_implicit_unset(self): + with self._monkey(None): with self.assertRaises(EnvironmentError): - gcloud.datastore._require_connection() + self._callFUT() - def test__require_connection_value_set(self): - import gcloud.datastore + def test__require_dataset_implicit_unset_passed_explicitly(self): + ID = 'DATASET' + with self._monkey(None): + self.assertEqual(self._callFUT(ID), ID) + + def test__require_dataset_id_implicit_set(self): + IMPLICIT_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_ID): + stored_id = self._callFUT() + self.assertTrue(stored_id is IMPLICIT_ID) + + def test__require_dataset_id_implicit_set_passed_explicitly(self): + ID = 'DATASET' + IMPLICIT_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_ID): + self.assertEqual(self._callFUT(ID), ID) + + +class Test_require_connection(unittest2.TestCase): + + _MARKER = object() + + def _callFUT(self, passed=_MARKER): + from gcloud.datastore import _require_connection + if passed is self._MARKER: + return _require_connection() + return _require_connection(passed) + + def _monkey(self, connection): from gcloud.datastore import _implicit_environ from gcloud._testing import _Monkey + return _Monkey(_implicit_environ, CONNECTION=connection) - FAKE_CONNECTION = object() - with _Monkey(_implicit_environ, CONNECTION=FAKE_CONNECTION): - stored_connection = gcloud.datastore._require_connection() - self.assertTrue(stored_connection is FAKE_CONNECTION) + def test__require_connection_implicit_unset(self): + with self._monkey(None): + with self.assertRaises(EnvironmentError): + self._callFUT() + + def test__require_connection_implicit_unset_passed_explicitly(self): + CONNECTION = object() + with self._monkey(None): + self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) + + def 
test__require_connection_implicit_set(self): + IMPLICIT_CONNECTION = object() + with self._monkey(IMPLICIT_CONNECTION): + self.assertTrue(self._callFUT() is IMPLICIT_CONNECTION) + + def test__require_connection_implicit_set_passed_explicitly(self): + IMPLICIT_CONNECTION = object() + CONNECTION = object() + with self._monkey(IMPLICIT_CONNECTION): + self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) class Test_get_entities_function(unittest2.TestCase): diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index f8dbde38ca63..1d294297a373 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -17,8 +17,9 @@ class TestKey(unittest2.TestCase): + _DEFAULT_DATASET = 'DATASET' + def setUp(self): - self._DEFAULT_DATASET = 'DATASET' from gcloud.datastore import _implicit_environ self._replaced_dataset_id = _implicit_environ.DATASET_ID @@ -29,40 +30,56 @@ def tearDown(self): _implicit_environ.DATASET_ID = self._replaced_dataset_id def _getTargetClass(self): - from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key - - _implicit_environ.DATASET_ID = self._DEFAULT_DATASET return Key def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) + def _monkeyDatasetID(self, dataset_id=_DEFAULT_DATASET): + from gcloud._testing import _Monkey + from gcloud.datastore import _implicit_environ + return _Monkey(_implicit_environ, DATASET_ID=dataset_id) + def test_ctor_empty(self): self.assertRaises(ValueError, self._makeOne) def test_ctor_no_dataset_id(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ klass = self._getTargetClass() - with _Monkey(_implicit_environ, DATASET_ID=None): + with self._monkeyDatasetID(None): self.assertRaises(ValueError, klass, 'KIND') + def test_ctor_w_implicit_dataset_id(self): + _DATASET = 'DATASET' + _KIND = 'KIND' + with self._monkeyDatasetID(_DATASET): + key = self._makeOne(_KIND) + 
self.assertEqual(key.dataset_id, _DATASET) + self.assertEqual(key.namespace, None) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.path, [{'kind': _KIND}]) + + def test_ctor_w_implicit_dataset_id_empty_path(self): + _DATASET = 'DATASET' + with self._monkeyDatasetID(_DATASET): + self.assertRaises(ValueError, self._makeOne) + def test_ctor_parent(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_DATASET = 'DATASET-ALT' _PARENT_NAMESPACE = 'NAMESPACE' - parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, - dataset_id=_PARENT_DATASET, - namespace=_PARENT_NAMESPACE) _CHILD_KIND = 'KIND2' _CHILD_ID = 2345 _PATH = [ {'kind': _PARENT_KIND, 'id': _PARENT_ID}, {'kind': _CHILD_KIND, 'id': _CHILD_ID}, ] - key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, + dataset_id=_PARENT_DATASET, + namespace=_PARENT_NAMESPACE) + with self._monkeyDatasetID(): + key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.dataset_id, parent_key.dataset_id) self.assertEqual(key.namespace, parent_key.namespace) self.assertEqual(key.kind, _CHILD_KIND) @@ -70,23 +87,29 @@ def test_ctor_parent(self): self.assertTrue(key.parent is parent_key) def test_ctor_partial_parent(self): - parent_key = self._makeOne('KIND') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, parent=parent_key) + with self._monkeyDatasetID(): + parent_key = self._makeOne('KIND') + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key) def test_ctor_parent_bad_type(self): - with self.assertRaises(AttributeError): - self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) + with self._monkeyDatasetID(): + with self.assertRaises(AttributeError): + self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) def test_ctor_parent_bad_namespace(self): - parent_key = self._makeOne('KIND', 1234, namespace='FOO') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, 
namespace='BAR', parent=parent_key) + with self._monkeyDatasetID(): + parent_key = self._makeOne('KIND', 1234, namespace='FOO') + with self.assertRaises(ValueError): + self._makeOne( + 'KIND2', 1234, namespace='BAR', parent=parent_key) def test_ctor_parent_bad_dataset_id(self): parent_key = self._makeOne('KIND', 1234, dataset_id='FOO') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, dataset_id='BAR', parent=parent_key) + with self._monkeyDatasetID(): + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, dataset_id='BAR', + parent=parent_key) def test_ctor_explicit(self): _DATASET = 'DATASET-ALT' @@ -96,19 +119,21 @@ def test_ctor_explicit(self): _PATH = [{'kind': _KIND, 'id': _ID}] key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, dataset_id=_DATASET) - self.assertNotEqual(_DATASET, self._DEFAULT_DATASET) self.assertEqual(key.dataset_id, _DATASET) self.assertEqual(key.namespace, _NAMESPACE) self.assertEqual(key.kind, _KIND) self.assertEqual(key.path, _PATH) def test_ctor_bad_kind(self): - self.assertRaises(ValueError, self._makeOne, object()) + with self._monkeyDatasetID(): + self.assertRaises(ValueError, self._makeOne, object()) def test_ctor_bad_id_or_name(self): - self.assertRaises(ValueError, self._makeOne, 'KIND', object()) - self.assertRaises(ValueError, self._makeOne, 'KIND', None) - self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None) + with self._monkeyDatasetID(): + self.assertRaises(ValueError, self._makeOne, 'KIND', object()) + self.assertRaises(ValueError, self._makeOne, 'KIND', None) + self.assertRaises(ValueError, + self._makeOne, 'KIND', 10, 'KIND2', None) def test__clone(self): _DATASET = 'DATASET-ALT' @@ -125,7 +150,8 @@ def test__clone(self): self.assertEqual(clone.path, _PATH) def test_completed_key_on_partial_w_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') _ID = 1234 new_key = key.completed_key(_ID) self.assertFalse(key is new_key) 
@@ -133,7 +159,8 @@ def test_completed_key_on_partial_w_id(self): self.assertEqual(new_key.name, None) def test_completed_key_on_partial_w_name(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') _NAME = 'NAME' new_key = key.completed_key(_NAME) self.assertFalse(key is new_key) @@ -141,17 +168,20 @@ def test_completed_key_on_partial_w_name(self): self.assertEqual(new_key.name, _NAME) def test_completed_key_on_partial_w_invalid(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertRaises(ValueError, key.completed_key, object()) def test_completed_key_on_complete(self): - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB _KIND = 'KIND' - key = self._makeOne(_KIND) + with self._monkeyDatasetID(): + key = self._makeOne(_KIND) pb = key.to_protobuf() self.assertTrue(isinstance(pb, KeyPB)) @@ -170,13 +200,15 @@ def test_to_protobuf_defaults(self): def test_to_protobuf_w_explicit_dataset_id(self): _DATASET = 'DATASET-ALT' - key = self._makeOne('KIND', dataset_id=_DATASET) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', dataset_id=_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.dataset_id, _DATASET) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - key = self._makeOne('KIND', namespace=_NAMESPACE) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', namespace=_NAMESPACE) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace, _NAMESPACE) @@ -185,7 +217,8 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 'NAME' - key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) pb = key.to_protobuf() 
elems = list(pb.path_element) self.assertEqual(len(elems), 2) @@ -195,7 +228,8 @@ def test_to_protobuf_w_explicit_path(self): self.assertEqual(elems[1].id, _ID) def test_to_protobuf_w_no_kind(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') # Force the 'kind' to be unset. Maybe `to_protobuf` should fail # on this? The backend certainly will. key._path[-1].pop('kind') @@ -207,7 +241,8 @@ def test_get_explicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) entity = key.get(connection=cnxn) self.assertEqual(entity, None) @@ -218,7 +253,8 @@ def test_get_implicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with _Monkey(_implicit_environ, CONNECTION=cnxn): entity = key.get() self.assertEqual(entity, None) @@ -245,7 +281,8 @@ def test_get_explicit_connection_hit(self): cnxn = _Connection(*cnxn_lookup_result) # Create key and look-up. 
- key = self._makeOne(KIND, ID) + with self._monkeyDatasetID(): + key = self._makeOne(KIND, ID) entity = key.get(connection=cnxn) self.assertEqual(entity.items(), [('foo', 'Foo')]) self.assertTrue(entity.key is key) @@ -254,7 +291,8 @@ def test_get_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with self.assertRaises(EnvironmentError): key.get() @@ -262,7 +300,8 @@ def test_delete_explicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) result = key.delete(connection=cnxn) self.assertEqual(result, None) self.assertEqual(cnxn._called_dataset_id, self._DEFAULT_DATASET) @@ -274,7 +313,8 @@ def test_delete_implicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with _Monkey(_implicit_environ, CONNECTION=cnxn): result = key.delete() @@ -286,62 +326,74 @@ def test_delete_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with self.assertRaises(AttributeError): key.delete() def test_is_partial_no_name_or_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertTrue(key.is_partial) def test_is_partial_w_id(self): _ID = 1234 - key = self._makeOne('KIND', _ID) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _ID) self.assertFalse(key.is_partial) def test_is_partial_w_name(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME) + with self._monkeyDatasetID(): + 
key = self._makeOne('KIND', _NAME) self.assertFalse(key.is_partial) def test_id_or_name_no_name_or_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertEqual(key.id_or_name, None) def test_id_or_name_no_name_or_id_child(self): - key = self._makeOne('KIND1', 1234, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne('KIND1', 1234, 'KIND2') self.assertEqual(key.id_or_name, None) def test_id_or_name_w_id_only(self): _ID = 1234 - key = self._makeOne('KIND', _ID) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _ID) self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _NAME) self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertEqual(key.parent, None) def test_parent_explicit_top_level(self): - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) self.assertEqual(key.parent, None) def test_parent_explicit_nested(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent From f299d31156a45b5d32c26cd1cd122f51bae7609f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 15:42:25 
-0500 Subject: [PATCH 2/7] Valet service. --- gcloud/datastore/key.py | 1 - 1 file changed, 1 deletion(-) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 300db3b3cff9..36fe239278f2 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -74,7 +74,6 @@ def __init__(self, *path_args, **kwargs): # _combine_args() is called. self._path = self._combine_args() - @staticmethod def _parse_path(path_args): """Parses positional arguments into key path with kinds and IDs. From e117ca07e56a3503445d54568a93182b6d533d56 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 16:00:20 -0500 Subject: [PATCH 3/7] Replace most monkeys of implicit environ w/ explicit dataset IDs. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/501#discussion_r22613948 --- gcloud/datastore/test_key.py | 147 ++++++++++++++++------------------- 1 file changed, 65 insertions(+), 82 deletions(-) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index 1d294297a373..20165e93b19a 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -61,8 +61,7 @@ def test_ctor_w_implicit_dataset_id(self): def test_ctor_w_implicit_dataset_id_empty_path(self): _DATASET = 'DATASET' - with self._monkeyDatasetID(_DATASET): - self.assertRaises(ValueError, self._makeOne) + self.assertRaises(ValueError, self._makeOne, dataset=_DATASET) def test_ctor_parent(self): _PARENT_KIND = 'KIND1' @@ -77,9 +76,9 @@ def test_ctor_parent(self): ] parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, dataset_id=_PARENT_DATASET, - namespace=_PARENT_NAMESPACE) - with self._monkeyDatasetID(): - key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + namespace=_PARENT_NAMESPACE, + ) + key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.dataset_id, parent_key.dataset_id) self.assertEqual(key.namespace, parent_key.namespace) self.assertEqual(key.kind, _CHILD_KIND) @@ -87,29 +86,33 @@ def 
test_ctor_parent(self): self.assertTrue(key.parent is parent_key) def test_ctor_partial_parent(self): - with self._monkeyDatasetID(): - parent_key = self._makeOne('KIND') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, parent=parent_key) + parent_key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key) def test_ctor_parent_bad_type(self): - with self._monkeyDatasetID(): - with self.assertRaises(AttributeError): - self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) + with self.assertRaises(AttributeError): + self._makeOne('KIND2', 1234, parent=('KIND1', 1234), + dataset_id=self._DEFAULT_DATASET) def test_ctor_parent_bad_namespace(self): - with self._monkeyDatasetID(): - parent_key = self._makeOne('KIND', 1234, namespace='FOO') - with self.assertRaises(ValueError): - self._makeOne( - 'KIND2', 1234, namespace='BAR', parent=parent_key) + parent_key = self._makeOne('KIND', 1234, namespace='FOO', + dataset_id=self._DEFAULT_DATASET) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key, + dataset_id=self._DEFAULT_DATASET) def test_ctor_parent_bad_dataset_id(self): parent_key = self._makeOne('KIND', 1234, dataset_id='FOO') - with self._monkeyDatasetID(): - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, dataset_id='BAR', - parent=parent_key) + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key, + dataset_id='BAR') + + def test_ctor_parent_empty_path(self): + parent_key = self._makeOne('KIND', 1234, + dataset_id=self._DEFAULT_DATASET) + with self.assertRaises(ValueError): + self._makeOne(parent=parent_key) def test_ctor_explicit(self): _DATASET = 'DATASET-ALT' @@ -125,15 +128,17 @@ def test_ctor_explicit(self): self.assertEqual(key.path, _PATH) def test_ctor_bad_kind(self): - with self._monkeyDatasetID(): - self.assertRaises(ValueError, self._makeOne, 
object()) + self.assertRaises(ValueError, + self._makeOne, object(), dataset_id=self._DEFAULT_DATASET) def test_ctor_bad_id_or_name(self): - with self._monkeyDatasetID(): - self.assertRaises(ValueError, self._makeOne, 'KIND', object()) - self.assertRaises(ValueError, self._makeOne, 'KIND', None) - self.assertRaises(ValueError, - self._makeOne, 'KIND', 10, 'KIND2', None) + self.assertRaises(ValueError, self._makeOne, 'KIND', object(), + dataset_id=self._DEFAULT_DATASET) + self.assertRaises(ValueError, self._makeOne, 'KIND', None, + dataset_id=self._DEFAULT_DATASET) + self.assertRaises(ValueError, + self._makeOne, 'KIND', 10, 'KIND2', None, + dataset_id=self._DEFAULT_DATASET) def test__clone(self): _DATASET = 'DATASET-ALT' @@ -150,8 +155,7 @@ def test__clone(self): self.assertEqual(clone.path, _PATH) def test_completed_key_on_partial_w_id(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) _ID = 1234 new_key = key.completed_key(_ID) self.assertFalse(key is new_key) @@ -159,8 +163,7 @@ def test_completed_key_on_partial_w_id(self): self.assertEqual(new_key.name, None) def test_completed_key_on_partial_w_name(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) _NAME = 'NAME' new_key = key.completed_key(_NAME) self.assertFalse(key is new_key) @@ -168,20 +171,17 @@ def test_completed_key_on_partial_w_name(self): self.assertEqual(new_key.name, _NAME) def test_completed_key_on_partial_w_invalid(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) self.assertRaises(ValueError, key.completed_key, object()) def test_completed_key_on_complete(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) self.assertRaises(ValueError, key.completed_key, 5678) 
def test_to_protobuf_defaults(self): from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB _KIND = 'KIND' - with self._monkeyDatasetID(): - key = self._makeOne(_KIND) + key = self._makeOne(_KIND, dataset_id=self._DEFAULT_DATASET) pb = key.to_protobuf() self.assertTrue(isinstance(pb, KeyPB)) @@ -200,15 +200,14 @@ def test_to_protobuf_defaults(self): def test_to_protobuf_w_explicit_dataset_id(self): _DATASET = 'DATASET-ALT' - with self._monkeyDatasetID(): - key = self._makeOne('KIND', dataset_id=_DATASET) + key = self._makeOne('KIND', dataset_id=_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.dataset_id, _DATASET) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - with self._monkeyDatasetID(): - key = self._makeOne('KIND', namespace=_NAMESPACE) + key = self._makeOne('KIND', namespace=_NAMESPACE, + dataset_id=self._DEFAULT_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace, _NAMESPACE) @@ -217,8 +216,8 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 'NAME' - with self._monkeyDatasetID(): - key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID, + dataset_id=self._DEFAULT_DATASET) pb = key.to_protobuf() elems = list(pb.path_element) self.assertEqual(len(elems), 2) @@ -228,8 +227,7 @@ def test_to_protobuf_w_explicit_path(self): self.assertEqual(elems[1].id, _ID) def test_to_protobuf_w_no_kind(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) # Force the 'kind' to be unset. Maybe `to_protobuf` should fail # on this? The backend certainly will. 
key._path[-1].pop('kind') @@ -241,8 +239,7 @@ def test_get_explicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) entity = key.get(connection=cnxn) self.assertEqual(entity, None) @@ -253,8 +250,7 @@ def test_get_implicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) with _Monkey(_implicit_environ, CONNECTION=cnxn): entity = key.get() self.assertEqual(entity, None) @@ -281,8 +277,7 @@ def test_get_explicit_connection_hit(self): cnxn = _Connection(*cnxn_lookup_result) # Create key and look-up. - with self._monkeyDatasetID(): - key = self._makeOne(KIND, ID) + key = self._makeOne(KIND, ID, dataset_id=self._DEFAULT_DATASET) entity = key.get(connection=cnxn) self.assertEqual(entity.items(), [('foo', 'Foo')]) self.assertTrue(entity.key is key) @@ -291,8 +286,7 @@ def test_get_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) with self.assertRaises(EnvironmentError): key.get() @@ -300,8 +294,7 @@ def test_delete_explicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) result = key.delete(connection=cnxn) self.assertEqual(result, None) self.assertEqual(cnxn._called_dataset_id, self._DEFAULT_DATASET) @@ -313,8 +306,7 @@ def test_delete_implicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = 
_Connection() - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) with _Monkey(_implicit_environ, CONNECTION=cnxn): result = key.delete() @@ -326,74 +318,65 @@ def test_delete_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) with self.assertRaises(AttributeError): key.delete() def test_is_partial_no_name_or_id(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) self.assertTrue(key.is_partial) def test_is_partial_w_id(self): _ID = 1234 - with self._monkeyDatasetID(): - key = self._makeOne('KIND', _ID) + key = self._makeOne('KIND', _ID, dataset_id=self._DEFAULT_DATASET) self.assertFalse(key.is_partial) def test_is_partial_w_name(self): _NAME = 'NAME' - with self._monkeyDatasetID(): - key = self._makeOne('KIND', _NAME) + key = self._makeOne('KIND', _NAME, dataset_id=self._DEFAULT_DATASET) self.assertFalse(key.is_partial) def test_id_or_name_no_name_or_id(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.id_or_name, None) def test_id_or_name_no_name_or_id_child(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND1', 1234, 'KIND2') + key = self._makeOne('KIND1', 1234, 'KIND2', + dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.id_or_name, None) def test_id_or_name_w_id_only(self): _ID = 1234 - with self._monkeyDatasetID(): - key = self._makeOne('KIND', _ID) + key = self._makeOne('KIND', _ID, dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): _NAME = 'NAME' - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 
_NAME) + key = self._makeOne('KIND', _NAME, dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND') + key = self._makeOne('KIND', dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.parent, None) def test_parent_explicit_top_level(self): - with self._monkeyDatasetID(): - key = self._makeOne('KIND', 1234) + key = self._makeOne('KIND', 1234, dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.parent, None) def test_parent_explicit_nested(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - with self._monkeyDatasetID(): - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + dataset_id=self._DEFAULT_DATASET) self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - with self._monkeyDatasetID(): - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2', + dataset_id=self._DEFAULT_DATASET) parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent From eead3e1de23fecaf96a4c5a5c486c4939d6de174 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 16:01:40 -0500 Subject: [PATCH 4/7] Use correct Sphinx role. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/501#discussion_r22613788 --- gcloud/datastore/entity.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 47efe59639fa..8dcde3bc3b13 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -40,7 +40,7 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. 
- Use :func:`gcloud.datastore.key.Key.get` to retrieve an existing entity. + Use :meth:`gcloud.datastore.key.Key.get` to retrieve an existing entity. >>> key.get() From 6a87fbab5017917b3c2d6b89420f0b4eb5df95a3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 16:07:14 -0500 Subject: [PATCH 5/7] Use 'ID' instead of 'id' consistently in docstrings. When used as an abbreviation for 'identifier', rather than to refer to methods named 'id'. --- gcloud/datastore/connection.py | 6 +++--- gcloud/datastore/key.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 8b6d18f22926..08fb94c4b78c 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -372,7 +372,7 @@ def rollback(self, dataset_id): if the connection isn't currently in a transaction. :type dataset_id: string - :param dataset_id: The id of the dataset to which the transaction + :param dataset_id: The ID of the dataset to which the transaction belongs. """ if not self.transaction() or not self.transaction().id: @@ -390,7 +390,7 @@ def allocate_ids(self, dataset_id, key_pbs): Maps the ``DatastoreService.AllocateIds`` protobuf RPC. :type dataset_id: string - :param dataset_id: The id of the dataset to which the transaction + :param dataset_id: The ID of the dataset to which the transaction belongs. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` @@ -416,7 +416,7 @@ def save_entity(self, dataset_id, key_pb, properties, not passed in 'properties' no longer be set for the entity. :type dataset_id: string - :param dataset_id: The id of the dataset in which to save the entity. + :param dataset_id: The ID of the dataset in which to save the entity. :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pb: The complete or partial key for the entity. 
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 36fe239278f2..563ec315c579 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -80,11 +80,11 @@ def _parse_path(path_args): :type path_args: :class:`tuple` :param path_args: A tuple from positional arguments. Should be - alternating list of kinds (string) and id/name + alternating list of kinds (string) and ID/name parts (int or string). :rtype: list of dict - :returns: A list of key parts with kind and id or name set. + :returns: A list of key parts with kind and ID or name set. :raises: `ValueError` if there are no `path_args`, if one of the kinds is not a string or if one of the IDs/names is not a string or an integer. @@ -126,7 +126,7 @@ def _combine_args(self): _namespace and _dataset_id if not already set. :rtype: list of dict - :returns: A list of key parts with kind and id or name set. + :returns: A list of key parts with kind and ID or name set. :raises: `ValueError` if the parent key is not complete. """ child_path = self._parse_path(self._flat_path) From a87a14361245f2c3dd098d6b7e7a647ad8c4f0a0 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 17:09:14 -0500 Subject: [PATCH 6/7] Typo. 
Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/501#discussion-diff-22616137 --- gcloud/datastore/test_key.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index 20165e93b19a..a91f20bca408 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -61,7 +61,7 @@ def test_ctor_w_implicit_dataset_id(self): def test_ctor_w_implicit_dataset_id_empty_path(self): _DATASET = 'DATASET' - self.assertRaises(ValueError, self._makeOne, dataset=_DATASET) + self.assertRaises(ValueError, self._makeOne, dataset_id=_DATASET) def test_ctor_parent(self): _PARENT_KIND = 'KIND1' From 90829f5777db997826e473378d2d5358866c3504 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 17:12:53 -0500 Subject: [PATCH 7/7] Chixen bonez. --- gcloud/datastore/test_key.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index a91f20bca408..1022385ce674 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -76,8 +76,7 @@ def test_ctor_parent(self): ] parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, dataset_id=_PARENT_DATASET, - namespace=_PARENT_NAMESPACE, - ) + namespace=_PARENT_NAMESPACE) key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.dataset_id, parent_key.dataset_id) self.assertEqual(key.namespace, parent_key.namespace) @@ -97,7 +96,7 @@ def test_ctor_parent_bad_type(self): def test_ctor_parent_bad_namespace(self): parent_key = self._makeOne('KIND', 1234, namespace='FOO', - dataset_id=self._DEFAULT_DATASET) + dataset_id=self._DEFAULT_DATASET) with self.assertRaises(ValueError): self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key, dataset_id=self._DEFAULT_DATASET) @@ -128,17 +127,16 @@ def test_ctor_explicit(self): self.assertEqual(key.path, _PATH) def test_ctor_bad_kind(self): - 
self.assertRaises(ValueError, - self._makeOne, object(), dataset_id=self._DEFAULT_DATASET) + self.assertRaises(ValueError, self._makeOne, object(), + dataset_id=self._DEFAULT_DATASET) def test_ctor_bad_id_or_name(self): self.assertRaises(ValueError, self._makeOne, 'KIND', object(), - dataset_id=self._DEFAULT_DATASET) + dataset_id=self._DEFAULT_DATASET) self.assertRaises(ValueError, self._makeOne, 'KIND', None, - dataset_id=self._DEFAULT_DATASET) - self.assertRaises(ValueError, - self._makeOne, 'KIND', 10, 'KIND2', None, - dataset_id=self._DEFAULT_DATASET) + dataset_id=self._DEFAULT_DATASET) + self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None, + dataset_id=self._DEFAULT_DATASET) def test__clone(self): _DATASET = 'DATASET-ALT'