From 95e9473b067e0c4a2828badc127d977cfb0719fa Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 13:31:04 -0500 Subject: [PATCH 1/9] Don't document use of 'Dataset' or its factory methods. Toward #477, step 8. --- gcloud/datastore/connection.py | 34 ++++++++++++++++------------------ gcloud/datastore/entity.py | 5 ++--- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index eeacc44ba35c..5402d96c1ea2 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -163,17 +163,13 @@ def lookup(self, dataset_id, key_pbs, (:class:`gcloud.datastore.datastore_v1_pb2.Key` and :class:`gcloud.datastore.datastore_v1_pb2.Entity`) and is used under the hood for methods like - :func:`gcloud.datastore.dataset.Dataset.get_entity`: + :func:`gcloud.datastore.key.Key.get`: >>> from gcloud import datastore >>> from gcloud.datastore.key import Key >>> connection = datastore.get_connection() - >>> dataset = connection.dataset('dataset-id') - >>> key = Key(dataset=dataset).kind('MyKind').id(1234) - - Using the :class:`gcloud.datastore.dataset.Dataset` helper: - - >>> dataset.get_entity(key) + >>> key = Key('MyKind', 1234, dataset_id='dataset-id') + >>> key.get() Using the ``connection`` class directly: @@ -182,7 +178,7 @@ def lookup(self, dataset_id, key_pbs, :type dataset_id: string - :param dataset_id: The dataset to look up the keys. + :param dataset_id: The ID of the dataset to look up the keys. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` (or a single Key) @@ -262,12 +258,12 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False): uses this method to fetch data: >>> from gcloud import datastore + >>> from gcloud.datastore.query import Query >>> connection = datastore.get_connection() - >>> dataset = connection.dataset('dataset-id') - >>> query = dataset.query().kind('MyKind').filter( - ... 
'property', '=', 'val') + >>> query = Query(dataset_id='dataset-id', 'MyKind') + >>> query.add_filter('property', '=', 'val') - Using the `fetch`` method... + Using the query's ``fetch_page`` method... >>> entities, cursor, more_results = query.fetch_page() >>> entities @@ -319,7 +315,7 @@ def begin_transaction(self, dataset_id, serializable=False): Maps the ``DatastoreService.BeginTransaction`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset over which to execute the transaction. + :param dataset_id: The ID dataset over which to execute the transaction. """ if self.transaction(): @@ -346,7 +342,7 @@ def commit(self, dataset_id, mutation_pb): Maps the ``DatastoreService.Commit`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset in which to perform the changes. + :param dataset_id: The id of the dataset in which to perform the changes. :type mutation_pb: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`. :param mutation_pb: The protobuf for the mutations being saved. @@ -376,7 +372,8 @@ def rollback(self, dataset_id): if the connection isn't currently in a transaction. :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. + :param dataset_id: The id of the dataset to which the transaction + belongs. """ if not self.transaction() or not self.transaction().id: raise ValueError('No transaction to rollback.') @@ -393,7 +390,8 @@ def allocate_ids(self, dataset_id, key_pbs): Maps the ``DatastoreService.AllocateIds`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. + :param dataset_id: The id of the dataset to which the transaction + belongs. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pbs: The keys for which the backend should allocate IDs. @@ -418,7 +416,7 @@ def save_entity(self, dataset_id, key_pb, properties, not passed in 'properties' no longer be set for the entity. 
:type dataset_id: string - :param dataset_id: The dataset in which to save the entity. + :param dataset_id: The id of the dataset in which to save the entity. :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pb: The complete or partial key for the entity. @@ -490,7 +488,7 @@ def delete_entities(self, dataset_id, key_pbs): :func:`gcloud.datastore.entity.Entity.delete` method. :type dataset_id: string - :param dataset_id: The dataset from which to delete the keys. + :param dataset_id: The ID of the dataset from which to delete the keys. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pbs: The keys to delete from the datastore. diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 479f136f43e9..47efe59639fa 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -40,10 +40,9 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - Use :func:`gcloud.datastore.dataset.Dataset.get_entity` - to retrieve an existing entity. + Use :func:`gcloud.datastore.key.Key.get` to retrieve an existing entity. - >>> dataset.get_entity(key) + >>> key.get() You can the set values on the entity just like you would on any From 8708b587fa2b7879c2f01d5ddd26ed7e6726bdc8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 13:32:11 -0500 Subject: [PATCH 2/9] Ensure that we have an implicit dataset ID. Toward #477, step 8. 
--- gcloud/datastore/test___init__.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 2146d8fdc2f9..8221acda7332 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -381,7 +381,9 @@ def test_allocate_ids_implicit(self): CUSTOM_DATASET = _Dataset() CUSTOM_CONNECTION = _Connection() NUM_IDS = 2 - with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET, + with _Monkey(_implicit_environ, + DATASET=CUSTOM_DATASET, + DATASET_ID=CUSTOM_DATASET.id(), CONNECTION=CUSTOM_CONNECTION): INCOMPLETE_KEY = Key('KIND') result = self._callFUT(INCOMPLETE_KEY, NUM_IDS) @@ -398,7 +400,9 @@ def test_allocate_ids_with_complete(self): CUSTOM_DATASET = _Dataset() CUSTOM_CONNECTION = _Connection() - with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET, + with _Monkey(_implicit_environ, + DATASET=CUSTOM_DATASET, + DATASET_ID=CUSTOM_DATASET.id(), CONNECTION=CUSTOM_CONNECTION): COMPLETE_KEY = Key('KIND', 1234) self.assertRaises(ValueError, self._callFUT, From 7a61f9491b24f79b874dd74eb748dfb2b806076a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 13:33:35 -0500 Subject: [PATCH 3/9] Ensure that 'Key.__init__' uses implicit dataset ID when appropriate. Toward #477, step 8. --- gcloud/datastore/key.py | 44 +++++----- gcloud/datastore/test_key.py | 157 +++++++++++++++++++++++------------ 2 files changed, 127 insertions(+), 74 deletions(-) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 2b289d452eba..eb82aea13ea7 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -66,28 +66,13 @@ def __init__(self, *path_args, **kwargs): keyword argument. 
""" self._flat_path = path_args - self._parent = kwargs.get('parent') + parent = self._parent = kwargs.get('parent') self._namespace = kwargs.get('namespace') - self._dataset_id = kwargs.get('dataset_id') + dataset_id = kwargs.get('dataset_id') + self._dataset_id = _validate_dataset_id(dataset_id, parent) # _flat_path, _parent, _namespace and _dataset_id must be set before # _combine_args() is called. self._path = self._combine_args() - self._validate_dataset_id() - - def _validate_dataset_id(self): - """Ensures the dataset ID is set. - - If unset, attempts to imply the ID from the environment. - - :raises: `ValueError` if there is no `dataset_id` and none - can be implied. - """ - if self._dataset_id is None: - if _implicit_environ.DATASET is not None: - # This assumes DATASET.id() is not None. - self._dataset_id = _implicit_environ.DATASET.id() - else: - raise ValueError('A Key must have a dataset ID set.') @staticmethod def _parse_path(path_args): @@ -345,7 +330,7 @@ def dataset_id(self): """Dataset ID getter. :rtype: :class:`str` - :returns: The key's dataset. + :returns: The key's dataset ID. """ return self._dataset_id @@ -384,3 +369,24 @@ def parent(self): def __repr__(self): return '' % (self.path, self.dataset_id) + +def _validate_dataset_id(dataset_id, parent): + """Ensure the dataset ID is set appropriately. + + If ``parent`` is passed, skip the test (it will be checked / fixed up + later). + + If ``dataset_id`` is unset, attempt to infer the ID from the environment. + + :raises: `ValueError` if ``dataset_id`` is None and none can be inferred. 
+ """ + if parent is None: + + if dataset_id is None: + + if _implicit_environ.DATASET_ID is None: + raise ValueError("A Key must have a dataset ID set.") + + dataset_id = _implicit_environ.DATASET_ID + + return dataset_id diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index beb6fd92ef4a..cc2befcededc 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -17,8 +17,9 @@ class TestKey(unittest2.TestCase): + _DEFAULT_DATASET = 'DATASET' + def setUp(self): - self._DEFAULT_DATASET = 'DATASET' from gcloud.datastore import _implicit_environ self._replaced_dataset = _implicit_environ.DATASET @@ -31,41 +32,53 @@ def tearDown(self): _implicit_environ.DATASET_ID = self._replaced_dataset_id def _getTargetClass(self): - from gcloud.datastore import _implicit_environ - from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key - - _implicit_environ.DATASET = Dataset(self._DEFAULT_DATASET) return Key def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) + def _monkeyDatasetID(self, dataset_id=_DEFAULT_DATASET): + from gcloud._testing import _Monkey + from gcloud.datastore import _implicit_environ + return _Monkey(_implicit_environ, DATASET_ID=dataset_id) + def test_ctor_empty(self): self.assertRaises(ValueError, self._makeOne) - def test_ctor_no_dataset(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ + def test_ctor_no_dataset_id(self): klass = self._getTargetClass() - with _Monkey(_implicit_environ, DATASET=None): + with self._monkeyDatasetID(None): self.assertRaises(ValueError, klass, 'KIND') + def test_ctor_w_implicit_dataset_id(self): + _DATASET = 'DATASET' + _KIND = 'KIND' + klass = self._getTargetClass() + with self._monkeyDatasetID(_DATASET): + key = self._makeOne(_KIND) + self.assertEqual(key.dataset_id, _DATASET) + self.assertEqual(key.namespace, None) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.path, 
[{'kind': _KIND}]) + def test_ctor_parent(self): + _DATASET = 'DATASET' _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_DATASET = 'DATASET-ALT' _PARENT_NAMESPACE = 'NAMESPACE' - parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, - dataset_id=_PARENT_DATASET, - namespace=_PARENT_NAMESPACE) _CHILD_KIND = 'KIND2' _CHILD_ID = 2345 _PATH = [ {'kind': _PARENT_KIND, 'id': _PARENT_ID}, {'kind': _CHILD_KIND, 'id': _CHILD_ID}, ] - key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, + dataset_id=_PARENT_DATASET, + namespace=_PARENT_NAMESPACE) + with self._monkeyDatasetID(): + key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.dataset_id, parent_key.dataset_id) self.assertEqual(key.namespace, parent_key.namespace) self.assertEqual(key.kind, _CHILD_KIND) @@ -73,23 +86,28 @@ def test_ctor_parent(self): self.assertTrue(key.parent is parent_key) def test_ctor_partial_parent(self): - parent_key = self._makeOne('KIND') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, parent=parent_key) + with self._monkeyDatasetID(): + parent_key = self._makeOne('KIND') + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key) def test_ctor_parent_bad_type(self): - with self.assertRaises(AttributeError): - self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) + with self._monkeyDatasetID(): + with self.assertRaises(AttributeError): + self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) def test_ctor_parent_bad_namespace(self): - parent_key = self._makeOne('KIND', 1234, namespace='FOO') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key) + with self._monkeyDatasetID(): + parent_key = self._makeOne('KIND', 1234, namespace='FOO') + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key) def test_ctor_parent_bad_dataset_id(self): parent_key = 
self._makeOne('KIND', 1234, dataset_id='FOO') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, dataset_id='BAR', parent=parent_key) + with self._monkeyDatasetID(): + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, dataset_id='BAR', + parent=parent_key) def test_ctor_explicit(self): _DATASET = 'DATASET-ALT' @@ -99,19 +117,21 @@ def test_ctor_explicit(self): _PATH = [{'kind': _KIND, 'id': _ID}] key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, dataset_id=_DATASET) - self.assertNotEqual(_DATASET, self._DEFAULT_DATASET) self.assertEqual(key.dataset_id, _DATASET) self.assertEqual(key.namespace, _NAMESPACE) self.assertEqual(key.kind, _KIND) self.assertEqual(key.path, _PATH) def test_ctor_bad_kind(self): - self.assertRaises(ValueError, self._makeOne, object()) + with self._monkeyDatasetID(): + self.assertRaises(ValueError, self._makeOne, object()) def test_ctor_bad_id_or_name(self): - self.assertRaises(ValueError, self._makeOne, 'KIND', object()) - self.assertRaises(ValueError, self._makeOne, 'KIND', None) - self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None) + with self._monkeyDatasetID(): + self.assertRaises(ValueError, self._makeOne, 'KIND', object()) + self.assertRaises(ValueError, self._makeOne, 'KIND', None) + self.assertRaises(ValueError, + self._makeOne, 'KIND', 10, 'KIND2', None) def test__clone(self): _DATASET = 'DATASET-ALT' @@ -128,7 +148,8 @@ def test__clone(self): self.assertEqual(clone.path, _PATH) def test_completed_key_on_partial_w_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') _ID = 1234 new_key = key.completed_key(_ID) self.assertFalse(key is new_key) @@ -136,7 +157,8 @@ def test_completed_key_on_partial_w_id(self): self.assertEqual(new_key.name, None) def test_completed_key_on_partial_w_name(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') _NAME = 'NAME' new_key = 
key.completed_key(_NAME) self.assertFalse(key is new_key) @@ -144,17 +166,20 @@ def test_completed_key_on_partial_w_name(self): self.assertEqual(new_key.name, _NAME) def test_completed_key_on_partial_w_invalid(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertRaises(ValueError, key.completed_key, object()) def test_completed_key_on_complete(self): - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID() as monkey: + key = self._makeOne('KIND', 1234) self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB _KIND = 'KIND' - key = self._makeOne(_KIND) + with self._monkeyDatasetID(): + key = self._makeOne(_KIND) pb = key.to_protobuf() self.assertTrue(isinstance(pb, KeyPB)) @@ -173,13 +198,15 @@ def test_to_protobuf_defaults(self): def test_to_protobuf_w_explicit_dataset(self): _DATASET = 'DATASET-ALT' - key = self._makeOne('KIND', dataset_id=_DATASET) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', dataset_id=_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.dataset_id, _DATASET) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - key = self._makeOne('KIND', namespace=_NAMESPACE) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', namespace=_NAMESPACE) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace, _NAMESPACE) @@ -188,7 +215,8 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 'NAME' - key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) pb = key.to_protobuf() elems = list(pb.path_element) self.assertEqual(len(elems), 2) @@ -198,7 +226,8 @@ def test_to_protobuf_w_explicit_path(self): self.assertEqual(elems[1].id, _ID) def test_to_protobuf_w_no_kind(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): 
+ key = self._makeOne('KIND') # Force the 'kind' to be unset. Maybe `to_protobuf` should fail # on this? The backend certainly will. key._path[-1].pop('kind') @@ -210,7 +239,8 @@ def test_get_explicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) entity = key.get(connection=cnxn) self.assertEqual(entity, None) @@ -221,7 +251,8 @@ def test_get_implicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with _Monkey(_implicit_environ, CONNECTION=cnxn): entity = key.get() self.assertEqual(entity, None) @@ -248,7 +279,8 @@ def test_get_explicit_connection_hit(self): cnxn = _Connection(*cnxn_lookup_result) # Create key and look-up. - key = self._makeOne(KIND, ID) + with self._monkeyDatasetID(): + key = self._makeOne(KIND, ID) entity = key.get(connection=cnxn) self.assertEqual(entity.items(), [('foo', 'Foo')]) self.assertTrue(entity.key is key) @@ -257,7 +289,8 @@ def test_get_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with self.assertRaises(EnvironmentError): key.get() @@ -265,7 +298,8 @@ def test_delete_explicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) result = key.delete(connection=cnxn) self.assertEqual(result, None) self.assertEqual(cnxn._called_dataset_id, self._DEFAULT_DATASET) @@ -277,7 +311,8 @@ def test_delete_implicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - key = self._makeOne('KIND', 
1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with _Monkey(_implicit_environ, CONNECTION=cnxn): result = key.delete() @@ -289,62 +324,74 @@ def test_delete_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with self.assertRaises(AttributeError): key.delete() def test_is_partial_no_name_or_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertTrue(key.is_partial) def test_is_partial_w_id(self): _ID = 1234 - key = self._makeOne('KIND', _ID) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _ID) self.assertFalse(key.is_partial) def test_is_partial_w_name(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _NAME) self.assertFalse(key.is_partial) def test_id_or_name_no_name_or_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertEqual(key.id_or_name, None) def test_id_or_name_no_name_or_id_child(self): - key = self._makeOne('KIND1', 1234, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne('KIND1', 1234, 'KIND2') self.assertEqual(key.id_or_name, None) def test_id_or_name_w_id_only(self): _ID = 1234 - key = self._makeOne('KIND', _ID) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _ID) self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _NAME) self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertEqual(key.parent, None) def test_parent_explicit_top_level(self): - key = self._makeOne('KIND', 1234) 
+ with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) self.assertEqual(key.parent, None) def test_parent_explicit_nested(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent From 99af098a90607ffd9cefaa317c0d94c86fe48029 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 14:30:43 -0500 Subject: [PATCH 4/9] Overlooked coverage foul. --- gcloud/datastore/test_key.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index cc2befcededc..cc83ead0424b 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -62,6 +62,13 @@ def test_ctor_w_implicit_dataset_id(self): self.assertEqual(key.kind, _KIND) self.assertEqual(key.path, [{'kind': _KIND}]) + def test_ctor_w_implicit_dataset_id_empty_path(self): + _DATASET = 'DATASET' + _KIND = 'KIND' + klass = self._getTargetClass() + with self._monkeyDatasetID(_DATASET): + self.assertRaises(ValueError, self._makeOne) + def test_ctor_parent(self): _DATASET = 'DATASET' _PARENT_KIND = 'KIND1' From 57e179bffbe26bc5a016d4de21c0e20647f5e5ae Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 14:31:36 -0500 Subject: [PATCH 5/9] Remove dead fixture. 
--- gcloud/datastore/test_entity.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index 4a54ce7198be..8b4f4e786dc0 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -193,16 +193,6 @@ def get(self, connection=None): return self._stored -class _Dataset(dict): - - def __init__(self, connection=None): - super(_Dataset, self).__init__() - self._connection = connection - - def id(self): - return _DATASET_ID - - class _Connection(object): _transaction = _saved = _deleted = None _save_result = (False, None) From ad78f59941219a7e5103873676fe6409a7b4a34f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 14:34:28 -0500 Subject: [PATCH 6/9] Rework '_require_dataset' -> '_require_dataset_id'. Also, make both it and '_require_connection' take the passed object, and test explicitly against None. --- gcloud/datastore/__init__.py | 44 ++++++++----- gcloud/datastore/test___init__.py | 101 +++++++++++++++++++----------- 2 files changed, 93 insertions(+), 52 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 5033aec06b63..ce6483bd10f7 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -138,28 +138,40 @@ def get_dataset(dataset_id): return Dataset(dataset_id, connection=connection) -def _require_dataset(): - """Convenience method to ensure DATASET is set. +def _require_dataset_id(dataset_id=None): + """Infer a dataset ID from the environment, if not passed explicitly. + + :type dataset_id: :class:`str`. + :param dataset_id: Optional. :rtype: :class:`gcloud.datastore.dataset.Dataset` :returns: A dataset based on the current environment. - :raises: :class:`EnvironmentError` if DATASET is not set. + :raises: :class:`EnvironmentError` if ``dataset_id`` is None, + and cannot be inferred from the environment. 
""" - if _implicit_environ.DATASET is None: - raise EnvironmentError('Dataset could not be inferred.') - return _implicit_environ.DATASET + if dataset_id is None: + if _implicit_environ.DATASET_ID is None: + raise EnvironmentError('Dataset ID could not be inferred.') + dataset_id = _implicit_environ.DATASET_ID + return dataset_id -def _require_connection(): - """Convenience method to ensure CONNECTION is set. +def _require_connection(connection=None): + """Infer a connection from the environment, if not passed explicitly. + + :type connection: :class:`gcloud.datastore.connection.Connection` + :param connection: Optional. :rtype: :class:`gcloud.datastore.connection.Connection` :returns: A connection based on the current environment. - :raises: :class:`EnvironmentError` if CONNECTION is not set. + :raises: :class:`EnvironmentError` if ``connection`` is None, and + cannot be inferred from the environment. """ - if _implicit_environ.CONNECTION is None: - raise EnvironmentError('Connection could not be inferred.') - return _implicit_environ.CONNECTION + if connection is None: + if _implicit_environ.CONNECTION is None: + raise EnvironmentError('Connection could not be inferred.') + connection = _implicit_environ.CONNECTION + return connection def get_entities(keys, missing=None, deferred=None, @@ -188,8 +200,8 @@ def get_entities(keys, missing=None, deferred=None, :rtype: list of :class:`gcloud.datastore.entity.Entity` :returns: The requested entities. """ - connection = connection or _require_connection() - dataset_id = dataset_id or _require_dataset().id() + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id) entity_pbs = connection.lookup( dataset_id=dataset_id, @@ -233,8 +245,8 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None): :returns: The (complete) keys allocated with `incomplete_key` as root. :raises: `ValueError` if `incomplete_key` is not a partial key. 
""" - connection = connection or _require_connection() - dataset_id = dataset_id or _require_dataset().id() + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id) if not incomplete_key.is_partial: raise ValueError(('Key is not partial.', incomplete_key)) diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 8221acda7332..e3b72043104d 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -157,45 +157,79 @@ def test_it(self): self.assertTrue(client._get_app_default_called) -class Test_implicit_behavior(unittest2.TestCase): +class Test__require_dataset_id(unittest2.TestCase): - def test__require_dataset_value_unset(self): - import gcloud.datastore + _MARKER = object() + + def _callFUT(self, passed=_MARKER): + from gcloud.datastore import _require_dataset_id + if passed is self._MARKER: + return _require_dataset_id() + return _require_dataset_id(passed) + + def _monkey(self, dataset_id): from gcloud.datastore import _implicit_environ from gcloud._testing import _Monkey + return _Monkey(_implicit_environ, DATASET_ID=dataset_id) - with _Monkey(_implicit_environ, DATASET=None): + def test__require_dataset_implicit_unset(self): + with self._monkey(None): with self.assertRaises(EnvironmentError): - gcloud.datastore._require_dataset() + self._callFUT() - def test__require_dataset_value_set(self): - import gcloud.datastore - from gcloud.datastore import _implicit_environ - from gcloud._testing import _Monkey + def test__require_dataset_implicit_unset_passed_explicitly(self): + ID = 'DATASET' + with self._monkey(None): + self.assertEqual(self._callFUT(ID), ID) + + def test__require_dataset_id_implicit_set(self): + IMPLICIT_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_ID): + stored_id = self._callFUT() + self.assertTrue(stored_id is IMPLICIT_ID) + + def test__require_dataset_id_implicit_set_passed_explicitly(self): + ID = 'DATASET' + IMPLICIT_ID = 'IMPLICIT' + with 
self._monkey(IMPLICIT_ID): + self.assertEqual(self._callFUT(ID), ID) + + +class Test_require_connection(unittest2.TestCase): - FAKE_DATASET = object() - with _Monkey(_implicit_environ, DATASET=FAKE_DATASET): - stored_dataset = gcloud.datastore._require_dataset() - self.assertTrue(stored_dataset is FAKE_DATASET) + _MARKER = object() - def test__require_connection_value_unset(self): - import gcloud.datastore + def _callFUT(self, passed=_MARKER): + from gcloud.datastore import _require_connection + if passed is self._MARKER: + return _require_connection() + return _require_connection(passed) + + def _monkey(self, connection): from gcloud.datastore import _implicit_environ from gcloud._testing import _Monkey + return _Monkey(_implicit_environ, CONNECTION=connection) - with _Monkey(_implicit_environ, CONNECTION=None): + def test__require_connection_implicit_unset(self): + with self._monkey(None): with self.assertRaises(EnvironmentError): - gcloud.datastore._require_connection() + self._callFUT() - def test__require_connection_value_set(self): - import gcloud.datastore - from gcloud.datastore import _implicit_environ - from gcloud._testing import _Monkey + def test__require_connection_implicit_unset_passed_explicitly(self): + CONNECTION = object() + with self._monkey(None): + self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) + + def test__require_connection_implicit_set(self): + IMPLICIT_CONNECTION = object() + with self._monkey(IMPLICIT_CONNECTION): + self.assertTrue(self._callFUT() is IMPLICIT_CONNECTION) - FAKE_CONNECTION = object() - with _Monkey(_implicit_environ, CONNECTION=FAKE_CONNECTION): - stored_connection = gcloud.datastore._require_connection() - self.assertTrue(stored_connection is FAKE_CONNECTION) + def test__require_connection_implicit_set_passed_explicitly(self): + IMPLICIT_CONNECTION = object() + CONNECTION = object() + with self._monkey(IMPLICIT_CONNECTION): + self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) class 
Test_get_entities_function(unittest2.TestCase): @@ -309,7 +343,6 @@ def test_get_entities_implicit(self): from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key from gcloud.datastore.test_connection import _Connection - from gcloud.datastore.test_entity import _Dataset from gcloud._testing import _Monkey DATASET_ID = 'DATASET' @@ -323,10 +356,10 @@ def test_get_entities_implicit(self): # Make a connection to return the entity pb. CUSTOM_CONNECTION = _Connection(entity_pb) - CUSTOM_DATASET = _Dataset() key = Key(KIND, ID, dataset_id=DATASET_ID) - with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET, + with _Monkey(_implicit_environ, + DATASET_ID=DATASET_ID, CONNECTION=CUSTOM_CONNECTION): result, = self._callFUT([key]) @@ -375,15 +408,13 @@ def test_allocate_ids_implicit(self): from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key from gcloud.datastore.test_connection import _Connection - from gcloud.datastore.test_entity import _Dataset from gcloud._testing import _Monkey - CUSTOM_DATASET = _Dataset() + DATASET_ID = 'DATASET' CUSTOM_CONNECTION = _Connection() NUM_IDS = 2 with _Monkey(_implicit_environ, - DATASET=CUSTOM_DATASET, - DATASET_ID=CUSTOM_DATASET.id(), + DATASET_ID=DATASET_ID, CONNECTION=CUSTOM_CONNECTION): INCOMPLETE_KEY = Key('KIND') result = self._callFUT(INCOMPLETE_KEY, NUM_IDS) @@ -395,14 +426,12 @@ def test_allocate_ids_with_complete(self): from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key from gcloud.datastore.test_connection import _Connection - from gcloud.datastore.test_entity import _Dataset from gcloud._testing import _Monkey - CUSTOM_DATASET = _Dataset() + DATASET_ID = 'DATASET' CUSTOM_CONNECTION = _Connection() with _Monkey(_implicit_environ, - DATASET=CUSTOM_DATASET, - DATASET_ID=CUSTOM_DATASET.id(), + DATASET_ID=DATASET_ID, CONNECTION=CUSTOM_CONNECTION): COMPLETE_KEY = Key('KIND', 1234) self.assertRaises(ValueError, self._callFUT, From 
ce5a2074052481167abed3125d48136a794087c8 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 7 Jan 2015 14:35:52 -0500
Subject: [PATCH 7/9] Add 'set_default_dataset_id' helper.

Will replace 'set_default_dataset'.
---
 gcloud/datastore/__init__.py      | 20 +++++++++
 gcloud/datastore/test___init__.py | 70 +++++++++++++++++++++++++++++++
 2 files changed, 90 insertions(+)

diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py
index ce6483bd10f7..bc190149aa94 100644
--- a/gcloud/datastore/__init__.py
+++ b/gcloud/datastore/__init__.py
@@ -82,6 +82,26 @@ def set_default_dataset(dataset_id=None):
         _implicit_environ.DATASET = get_dataset(dataset_id)
 
 
+def set_default_dataset_id(dataset_id=None):
+    """Set default dataset ID either explicitly or implicitly as fall-back.
+
+    In implicit case, currently only supports environment variable but will
+    support App Engine, Compute Engine and other environments in the future.
+
+    Local environment variable used is:
+    - GCLOUD_DATASET_ID
+
+    :type dataset_id: :class:`str`.
+    :param dataset_id: Optional. The dataset ID to use for the default
+                       dataset.
+    """
+    if dataset_id is None:
+        dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)
+
+    if dataset_id is not None:
+        _implicit_environ.DATASET_ID = dataset_id
+
+
 def set_default_connection(connection=None):
     """Set default connection either explicitly or implicitly as fall-back.
diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index e3b72043104d..a36eaaca6f07 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -97,6 +97,76 @@ def test_set_explicit(self): self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) +class Test_set_default_dataset_id(unittest2.TestCase): + + def setUp(self): + from gcloud.datastore import _implicit_environ + self._replaced_dataset_id = _implicit_environ.DATASET_ID + _implicit_environ.DATASET_ID = None + + def tearDown(self): + from gcloud.datastore import _implicit_environ + _implicit_environ.DATASET_ID = self._replaced_dataset_id + + def _callFUT(self, dataset_id=None): + from gcloud.datastore import set_default_dataset_id + return set_default_dataset_id(dataset_id=dataset_id) + + def _monkey(self, implicit_dataset_id): + import os + from gcloud.datastore import _DATASET_ENV_VAR_NAME + from gcloud._testing import _Monkey + environ = {_DATASET_ENV_VAR_NAME: implicit_dataset_id} + return _Monkey(os, getenv=environ.get) + + def test_no_env_var_set(self): + from gcloud.datastore import _implicit_environ + with self._monkey(None): + self._callFUT() + self.assertEqual(_implicit_environ.DATASET_ID, None) + + def test_set_from_env_var(self): + from gcloud.datastore import _DATASET_ENV_VAR_NAME + from gcloud.datastore import _implicit_environ + + # Make a custom getenv function to Monkey. 
+ DATASET = 'dataset' + VALUES = { + _DATASET_ENV_VAR_NAME: DATASET, + } + with self._monkey(DATASET): + self._callFUT() + self.assertEqual(_implicit_environ.DATASET_ID, DATASET) + + def test_set_explicit_w_env_var_set(self): + from gcloud.datastore import _implicit_environ + DATASET_ID = 'DATASET' + with self._monkey(None): + self._callFUT(DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) + + def test_set_explicit_no_env_var_set(self): + from gcloud.datastore import _implicit_environ + IMPLICIT_DATASET_ID = 'IMPLICIT' + DATASET_ID = 'DATASET' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT(DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) + + def test_set_explicit_None_wo_env_var_set(self): + from gcloud.datastore import _implicit_environ + with self._monkey(None): + self._callFUT(None) + self.assertEqual(_implicit_environ.DATASET_ID, None) + + def test_set_explicit_None_w_env_var_set(self): + from gcloud.datastore import _implicit_environ + IMPLICIT_DATASET_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT(None) + self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID) + + class Test_set_default_connection(unittest2.TestCase): def setUp(self): From 8baf341d947f94f085676c70fa72f15d1987c763 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 14:44:00 -0500 Subject: [PATCH 8/9] Valet service. --- gcloud/datastore/connection.py | 4 ++-- gcloud/datastore/key.py | 1 + gcloud/datastore/test___init__.py | 24 +++++++++--------------- gcloud/datastore/test_key.py | 13 +++++-------- 4 files changed, 17 insertions(+), 25 deletions(-) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 5402d96c1ea2..8b6d18f22926 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -315,7 +315,7 @@ def begin_transaction(self, dataset_id, serializable=False): Maps the ``DatastoreService.BeginTransaction`` protobuf RPC. 
     :type dataset_id: string
-    :param dataset_id: The ID dataset over which to execute the transaction.
+    :param dataset_id: The ID of the dataset to which the transaction applies.
     """
     if self.transaction():
@@ -342,7 +342,7 @@ def commit(self, dataset_id, mutation_pb):
         Maps the ``DatastoreService.Commit`` protobuf RPC.
 
     :type dataset_id: string
-    :param dataset_id: The id of the dataset in which to perform the changes.
+    :param dataset_id: The ID of the dataset to which the transaction applies.
 
     :type mutation_pb: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`.
     :param mutation_pb: The protobuf for the mutations being saved.
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index eb82aea13ea7..36fe239278f2 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -370,6 +370,7 @@ def parent(self):
     def __repr__(self):
         return '' % (self.path, self.dataset_id)
 
+
 def _validate_dataset_id(dataset_id, parent):
     """Ensure the dataset ID is set appropriately.
 
diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py
index a36eaaca6f07..397e0a1664ff 100644
--- a/gcloud/datastore/test___init__.py
+++ b/gcloud/datastore/test___init__.py
@@ -126,32 +126,26 @@ def test_no_env_var_set(self):
             self.assertEqual(_implicit_environ.DATASET_ID, None)
 
     def test_set_from_env_var(self):
-        from gcloud.datastore import _DATASET_ENV_VAR_NAME
         from gcloud.datastore import _implicit_environ
-
-        # Make a custom getenv function to Monkey.
- DATASET = 'dataset' - VALUES = { - _DATASET_ENV_VAR_NAME: DATASET, - } - with self._monkey(DATASET): + IMPLICIT_DATASET_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): self._callFUT() - self.assertEqual(_implicit_environ.DATASET_ID, DATASET) + self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID) def test_set_explicit_w_env_var_set(self): from gcloud.datastore import _implicit_environ - DATASET_ID = 'DATASET' + EXPLICIT_DATASET_ID = 'EXPLICIT' with self._monkey(None): - self._callFUT(DATASET_ID) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) + self._callFUT(EXPLICIT_DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID) def test_set_explicit_no_env_var_set(self): from gcloud.datastore import _implicit_environ IMPLICIT_DATASET_ID = 'IMPLICIT' - DATASET_ID = 'DATASET' + EXPLICIT_DATASET_ID = 'EXPLICIT' with self._monkey(IMPLICIT_DATASET_ID): - self._callFUT(DATASET_ID) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) + self._callFUT(EXPLICIT_DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID) def test_set_explicit_None_wo_env_var_set(self): from gcloud.datastore import _implicit_environ diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index cc83ead0424b..26c2ff992678 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -54,7 +54,6 @@ def test_ctor_no_dataset_id(self): def test_ctor_w_implicit_dataset_id(self): _DATASET = 'DATASET' _KIND = 'KIND' - klass = self._getTargetClass() with self._monkeyDatasetID(_DATASET): key = self._makeOne(_KIND) self.assertEqual(key.dataset_id, _DATASET) @@ -64,13 +63,10 @@ def test_ctor_w_implicit_dataset_id(self): def test_ctor_w_implicit_dataset_id_empty_path(self): _DATASET = 'DATASET' - _KIND = 'KIND' - klass = self._getTargetClass() with self._monkeyDatasetID(_DATASET): self.assertRaises(ValueError, self._makeOne) def test_ctor_parent(self): - _DATASET = 'DATASET' 
_PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_DATASET = 'DATASET-ALT' @@ -82,8 +78,8 @@ def test_ctor_parent(self): {'kind': _CHILD_KIND, 'id': _CHILD_ID}, ] parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, - dataset_id=_PARENT_DATASET, - namespace=_PARENT_NAMESPACE) + dataset_id=_PARENT_DATASET, + namespace=_PARENT_NAMESPACE) with self._monkeyDatasetID(): key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.dataset_id, parent_key.dataset_id) @@ -107,7 +103,8 @@ def test_ctor_parent_bad_namespace(self): with self._monkeyDatasetID(): parent_key = self._makeOne('KIND', 1234, namespace='FOO') with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key) + self._makeOne( + 'KIND2', 1234, namespace='BAR', parent=parent_key) def test_ctor_parent_bad_dataset_id(self): parent_key = self._makeOne('KIND', 1234, dataset_id='FOO') @@ -178,7 +175,7 @@ def test_completed_key_on_partial_w_invalid(self): self.assertRaises(ValueError, key.completed_key, object()) def test_completed_key_on_complete(self): - with self._monkeyDatasetID() as monkey: + with self._monkeyDatasetID(): key = self._makeOne('KIND', 1234) self.assertRaises(ValueError, key.completed_key, 5678) From 225206363467248ea4f9e3e427443c4914a46004 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jan 2015 14:54:16 -0500 Subject: [PATCH 9/9] Remove 'set_default_dataset'. --- gcloud/datastore/__init__.py | 21 ------ gcloud/datastore/test___init__.py | 102 ++++++------------------------ regression/datastore.py | 3 +- 3 files changed, 21 insertions(+), 105 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index bc190149aa94..4e5d03a3d4cb 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -61,27 +61,6 @@ _DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID' -def set_default_dataset(dataset_id=None): - """Set default dataset ID either explicitly or implicitly as fall-back. 
- - In implicit case, currently only supports enviroment variable but will - support App Engine, Compute Engine and other environments in the future. - - Local environment variable used is: - - GCLOUD_DATASET_ID - - :type dataset_id: :class:`str`. - :param dataset_id: Optional. The dataset ID to use for the default - dataset. - """ - if dataset_id is None: - dataset_id = os.getenv(_DATASET_ENV_VAR_NAME) - - if dataset_id is not None: - _implicit_environ.DATASET_ID = dataset_id - _implicit_environ.DATASET = get_dataset(dataset_id) - - def set_default_dataset_id(dataset_id=None): """Set default dataset ID either explicitly or implicitly as fall-back. diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 397e0a1664ff..f36d885933a8 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -15,88 +15,6 @@ import unittest2 -class Test_get_connection(unittest2.TestCase): - - def _callFUT(self): - from gcloud.datastore import get_connection - return get_connection() - - def test_it(self): - from gcloud import credentials - from gcloud.datastore.connection import Connection - from gcloud.test_credentials import _Client - from gcloud._testing import _Monkey - - client = _Client() - with _Monkey(credentials, client=client): - found = self._callFUT() - self.assertTrue(isinstance(found, Connection)) - self.assertTrue(found._credentials is client._signed) - self.assertTrue(client._get_app_default_called) - - -class Test_set_default_dataset(unittest2.TestCase): - - def setUp(self): - from gcloud.datastore import _implicit_environ - self._replaced_dataset = _implicit_environ.DATASET - self._replaced_dataset_id = _implicit_environ.DATASET_ID - _implicit_environ.DATASET = _implicit_environ.DATASET_ID = None - - def tearDown(self): - from gcloud.datastore import _implicit_environ - _implicit_environ.DATASET = self._replaced_dataset - _implicit_environ.DATASET_ID = self._replaced_dataset_id - - def _callFUT(self, 
dataset_id=None): - from gcloud.datastore import set_default_dataset - return set_default_dataset(dataset_id=dataset_id) - - def _test_with_environ(self, environ, expected_result, dataset_id=None): - import os - from gcloud._testing import _Monkey - from gcloud import datastore - from gcloud.datastore import _implicit_environ - - # Check the environment is unset. - self.assertEqual(_implicit_environ.DATASET, None) - - def custom_getenv(key): - return environ.get(key) - - def custom_get_dataset(local_dataset_id): - return local_dataset_id - - with _Monkey(os, getenv=custom_getenv): - with _Monkey(datastore, get_dataset=custom_get_dataset): - self._callFUT(dataset_id=dataset_id) - - self.assertEqual(_implicit_environ.DATASET, expected_result) - - def test_set_from_env_var(self): - from gcloud.datastore import _DATASET_ENV_VAR_NAME - from gcloud.datastore import _implicit_environ - - # Make a custom getenv function to Monkey. - DATASET = 'dataset' - VALUES = { - _DATASET_ENV_VAR_NAME: DATASET, - } - self._test_with_environ(VALUES, DATASET) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET) - - def test_no_env_var_set(self): - from gcloud.datastore import _implicit_environ - self._test_with_environ({}, None) - self.assertEqual(_implicit_environ.DATASET_ID, None) - - def test_set_explicit(self): - from gcloud.datastore import _implicit_environ - DATASET_ID = 'DATASET' - self._test_with_environ({}, DATASET_ID, dataset_id=DATASET_ID) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) - - class Test_set_default_dataset_id(unittest2.TestCase): def setUp(self): @@ -198,6 +116,26 @@ def test_set_implicit(self): self.assertEqual(_implicit_environ.CONNECTION, fake_cnxn) +class Test_get_connection(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore import get_connection + return get_connection() + + def test_it(self): + from gcloud import credentials + from gcloud.datastore.connection import Connection + from gcloud.test_credentials import 
_Client + from gcloud._testing import _Monkey + + client = _Client() + with _Monkey(credentials, client=client): + found = self._callFUT() + self.assertTrue(isinstance(found, Connection)) + self.assertTrue(found._credentials is client._signed) + self.assertTrue(client._get_app_default_called) + + class Test_get_dataset(unittest2.TestCase): def _callFUT(self, dataset_id): diff --git a/regression/datastore.py b/regression/datastore.py index 05138a961e75..55c69d6c67f9 100644 --- a/regression/datastore.py +++ b/regression/datastore.py @@ -27,8 +27,7 @@ from regression import populate_datastore -DATASET_ID = os.getenv('GCLOUD_TESTS_DATASET_ID') -datastore.set_default_dataset(dataset_id=DATASET_ID) +datastore.set_default_dataset_id() datastore.set_default_connection()