diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 5033aec06b63..4e5d03a3d4cb 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -61,7 +61,7 @@ _DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID' -def set_default_dataset(dataset_id=None): +def set_default_dataset_id(dataset_id=None): """Set default dataset ID either explicitly or implicitly as fall-back. In implicit case, currently only supports enviroment variable but will @@ -79,7 +79,6 @@ def set_default_dataset(dataset_id=None): if dataset_id is not None: _implicit_environ.DATASET_ID = dataset_id - _implicit_environ.DATASET = get_dataset(dataset_id) def set_default_connection(connection=None): @@ -138,28 +137,40 @@ def get_dataset(dataset_id): return Dataset(dataset_id, connection=connection) -def _require_dataset(): - """Convenience method to ensure DATASET is set. +def _require_dataset_id(dataset_id=None): + """Infer a dataset ID from the environment, if not passed explicitly. + + :type dataset_id: :class:`str`. + :param dataset_id: Optional. :rtype: :class:`gcloud.datastore.dataset.Dataset` :returns: A dataset based on the current environment. - :raises: :class:`EnvironmentError` if DATASET is not set. + :raises: :class:`EnvironmentError` if ``dataset_id`` is None, + and cannot be inferred from the environment. """ - if _implicit_environ.DATASET is None: - raise EnvironmentError('Dataset could not be inferred.') - return _implicit_environ.DATASET + if dataset_id is None: + if _implicit_environ.DATASET_ID is None: + raise EnvironmentError('Dataset ID could not be inferred.') + dataset_id = _implicit_environ.DATASET_ID + return dataset_id -def _require_connection(): - """Convenience method to ensure CONNECTION is set. +def _require_connection(connection=None): + """Infer a connection from the environment, if not passed explicitly. + + :type connection: :class:`gcloud.datastore.connection.Connection` + :param connection: Optional. 
:rtype: :class:`gcloud.datastore.connection.Connection` :returns: A connection based on the current environment. - :raises: :class:`EnvironmentError` if CONNECTION is not set. + :raises: :class:`EnvironmentError` if ``connection`` is None, and + cannot be inferred from the environment. """ - if _implicit_environ.CONNECTION is None: - raise EnvironmentError('Connection could not be inferred.') - return _implicit_environ.CONNECTION + if connection is None: + if _implicit_environ.CONNECTION is None: + raise EnvironmentError('Connection could not be inferred.') + connection = _implicit_environ.CONNECTION + return connection def get_entities(keys, missing=None, deferred=None, @@ -188,8 +199,8 @@ def get_entities(keys, missing=None, deferred=None, :rtype: list of :class:`gcloud.datastore.entity.Entity` :returns: The requested entities. """ - connection = connection or _require_connection() - dataset_id = dataset_id or _require_dataset().id() + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id) entity_pbs = connection.lookup( dataset_id=dataset_id, @@ -233,8 +244,8 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None): :returns: The (complete) keys allocated with `incomplete_key` as root. :raises: `ValueError` if `incomplete_key` is not a partial key. 
""" - connection = connection or _require_connection() - dataset_id = dataset_id or _require_dataset().id() + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id) if not incomplete_key.is_partial: raise ValueError(('Key is not partial.', incomplete_key)) diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index eeacc44ba35c..8b6d18f22926 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -163,17 +163,13 @@ def lookup(self, dataset_id, key_pbs, (:class:`gcloud.datastore.datastore_v1_pb2.Key` and :class:`gcloud.datastore.datastore_v1_pb2.Entity`) and is used under the hood for methods like - :func:`gcloud.datastore.dataset.Dataset.get_entity`: + :func:`gcloud.datastore.key.Key.get`: >>> from gcloud import datastore >>> from gcloud.datastore.key import Key >>> connection = datastore.get_connection() - >>> dataset = connection.dataset('dataset-id') - >>> key = Key(dataset=dataset).kind('MyKind').id(1234) - - Using the :class:`gcloud.datastore.dataset.Dataset` helper: - - >>> dataset.get_entity(key) + >>> key = Key('MyKind', 1234, dataset_id='dataset-id') + >>> key.get() Using the ``connection`` class directly: @@ -182,7 +178,7 @@ def lookup(self, dataset_id, key_pbs, :type dataset_id: string - :param dataset_id: The dataset to look up the keys. + :param dataset_id: The ID of the dataset to look up the keys. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` (or a single Key) @@ -262,12 +258,12 @@ def run_query(self, dataset_id, query_pb, namespace=None, eventual=False): uses this method to fetch data: >>> from gcloud import datastore + >>> from gcloud.datastore.query import Query >>> connection = datastore.get_connection() - >>> dataset = connection.dataset('dataset-id') - >>> query = dataset.query().kind('MyKind').filter( - ... 
'property', '=', 'val') + >>> query = Query('MyKind', dataset_id='dataset-id') + >>> query.add_filter('property', '=', 'val') - Using the `fetch`` method... + Using the query's ``fetch_page`` method... >>> entities, cursor, more_results = query.fetch_page() >>> entities @@ -319,7 +315,7 @@ def begin_transaction(self, dataset_id, serializable=False): Maps the ``DatastoreService.BeginTransaction`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset over which to execute the transaction. + :param dataset_id: The ID of the dataset to which the transaction applies. if self.transaction(): @@ -346,7 +342,7 @@ def commit(self, dataset_id, mutation_pb): Maps the ``DatastoreService.Commit`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset in which to perform the changes. + :param dataset_id: The ID of the dataset to which the transaction applies. :type mutation_pb: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`. :param mutation_pb: The protobuf for the mutations being saved. @@ -376,7 +372,8 @@ def rollback(self, dataset_id): if the connection isn't currently in a transaction. :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. + :param dataset_id: The id of the dataset to which the transaction + belongs. """ if not self.transaction() or not self.transaction().id: raise ValueError('No transaction to rollback.') @@ -393,7 +390,8 @@ def allocate_ids(self, dataset_id, key_pbs): Maps the ``DatastoreService.AllocateIds`` protobuf RPC. :type dataset_id: string - :param dataset_id: The dataset to which the transaction belongs. + :param dataset_id: The id of the dataset to which the transaction + belongs. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pbs: The keys for which the backend should allocate IDs. @@ -418,7 +416,7 @@ def save_entity(self, dataset_id, key_pb, properties, not passed in 'properties' no longer be set for the entity.
:type dataset_id: string - :param dataset_id: The dataset in which to save the entity. + :param dataset_id: The id of the dataset in which to save the entity. :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pb: The complete or partial key for the entity. @@ -490,7 +488,7 @@ def delete_entities(self, dataset_id, key_pbs): :func:`gcloud.datastore.entity.Entity.delete` method. :type dataset_id: string - :param dataset_id: The dataset from which to delete the keys. + :param dataset_id: The ID of the dataset from which to delete the keys. :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key` :param key_pbs: The keys to delete from the datastore. diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 479f136f43e9..47efe59639fa 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -40,10 +40,9 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - Use :func:`gcloud.datastore.dataset.Dataset.get_entity` - to retrieve an existing entity. + Use :func:`gcloud.datastore.key.Key.get` to retrieve an existing entity. - >>> dataset.get_entity(key) + >>> key.get() You can the set values on the entity just like you would on any diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 2b289d452eba..36fe239278f2 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -66,28 +66,13 @@ def __init__(self, *path_args, **kwargs): keyword argument. """ self._flat_path = path_args - self._parent = kwargs.get('parent') + parent = self._parent = kwargs.get('parent') self._namespace = kwargs.get('namespace') - self._dataset_id = kwargs.get('dataset_id') + dataset_id = kwargs.get('dataset_id') + self._dataset_id = _validate_dataset_id(dataset_id, parent) # _flat_path, _parent, _namespace and _dataset_id must be set before # _combine_args() is called. 
self._path = self._combine_args() - self._validate_dataset_id() - - def _validate_dataset_id(self): - """Ensures the dataset ID is set. - - If unset, attempts to imply the ID from the environment. - - :raises: `ValueError` if there is no `dataset_id` and none - can be implied. - """ - if self._dataset_id is None: - if _implicit_environ.DATASET is not None: - # This assumes DATASET.id() is not None. - self._dataset_id = _implicit_environ.DATASET.id() - else: - raise ValueError('A Key must have a dataset ID set.') @staticmethod def _parse_path(path_args): @@ -345,7 +330,7 @@ def dataset_id(self): """Dataset ID getter. :rtype: :class:`str` - :returns: The key's dataset. + :returns: The key's dataset ID. """ return self._dataset_id @@ -384,3 +369,25 @@ def parent(self): def __repr__(self): return '' % (self.path, self.dataset_id) + + +def _validate_dataset_id(dataset_id, parent): + """Ensure the dataset ID is set appropriately. + + If ``parent`` is passed, skip the test (it will be checked / fixed up + later). + + If ``dataset_id`` is unset, attempt to infer the ID from the environment. + + :raises: `ValueError` if ``dataset_id`` is None and none can be inferred. 
+ """ + if parent is None: + + if dataset_id is None: + + if _implicit_environ.DATASET_ID is None: + raise ValueError("A Key must have a dataset ID set.") + + dataset_id = _implicit_environ.DATASET_ID + + return dataset_id diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 2146d8fdc2f9..f36d885933a8 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -15,86 +15,68 @@ import unittest2 -class Test_get_connection(unittest2.TestCase): - - def _callFUT(self): - from gcloud.datastore import get_connection - return get_connection() - - def test_it(self): - from gcloud import credentials - from gcloud.datastore.connection import Connection - from gcloud.test_credentials import _Client - from gcloud._testing import _Monkey - - client = _Client() - with _Monkey(credentials, client=client): - found = self._callFUT() - self.assertTrue(isinstance(found, Connection)) - self.assertTrue(found._credentials is client._signed) - self.assertTrue(client._get_app_default_called) - - -class Test_set_default_dataset(unittest2.TestCase): +class Test_set_default_dataset_id(unittest2.TestCase): def setUp(self): from gcloud.datastore import _implicit_environ - self._replaced_dataset = _implicit_environ.DATASET self._replaced_dataset_id = _implicit_environ.DATASET_ID - _implicit_environ.DATASET = _implicit_environ.DATASET_ID = None + _implicit_environ.DATASET_ID = None def tearDown(self): from gcloud.datastore import _implicit_environ - _implicit_environ.DATASET = self._replaced_dataset _implicit_environ.DATASET_ID = self._replaced_dataset_id def _callFUT(self, dataset_id=None): - from gcloud.datastore import set_default_dataset - return set_default_dataset(dataset_id=dataset_id) + from gcloud.datastore import set_default_dataset_id + return set_default_dataset_id(dataset_id=dataset_id) - def _test_with_environ(self, environ, expected_result, dataset_id=None): + def _monkey(self, implicit_dataset_id): import os + from 
gcloud.datastore import _DATASET_ENV_VAR_NAME from gcloud._testing import _Monkey - from gcloud import datastore - from gcloud.datastore import _implicit_environ - - # Check the environment is unset. - self.assertEqual(_implicit_environ.DATASET, None) + environ = {_DATASET_ENV_VAR_NAME: implicit_dataset_id} + return _Monkey(os, getenv=environ.get) - def custom_getenv(key): - return environ.get(key) - - def custom_get_dataset(local_dataset_id): - return local_dataset_id - - with _Monkey(os, getenv=custom_getenv): - with _Monkey(datastore, get_dataset=custom_get_dataset): - self._callFUT(dataset_id=dataset_id) - - self.assertEqual(_implicit_environ.DATASET, expected_result) + def test_no_env_var_set(self): + from gcloud.datastore import _implicit_environ + with self._monkey(None): + self._callFUT() + self.assertEqual(_implicit_environ.DATASET_ID, None) def test_set_from_env_var(self): - from gcloud.datastore import _DATASET_ENV_VAR_NAME from gcloud.datastore import _implicit_environ + IMPLICIT_DATASET_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT() + self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID) - # Make a custom getenv function to Monkey. 
- DATASET = 'dataset' - VALUES = { - _DATASET_ENV_VAR_NAME: DATASET, - } - self._test_with_environ(VALUES, DATASET) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET) + def test_set_explicit_w_env_var_set(self): + from gcloud.datastore import _implicit_environ + EXPLICIT_DATASET_ID = 'EXPLICIT' + with self._monkey(None): + self._callFUT(EXPLICIT_DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID) - def test_no_env_var_set(self): + def test_set_explicit_no_env_var_set(self): + from gcloud.datastore import _implicit_environ + IMPLICIT_DATASET_ID = 'IMPLICIT' + EXPLICIT_DATASET_ID = 'EXPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT(EXPLICIT_DATASET_ID) + self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID) + + def test_set_explicit_None_wo_env_var_set(self): from gcloud.datastore import _implicit_environ - self._test_with_environ({}, None) + with self._monkey(None): + self._callFUT(None) self.assertEqual(_implicit_environ.DATASET_ID, None) - def test_set_explicit(self): + def test_set_explicit_None_w_env_var_set(self): from gcloud.datastore import _implicit_environ - DATASET_ID = 'DATASET' - self._test_with_environ({}, DATASET_ID, dataset_id=DATASET_ID) - self.assertEqual(_implicit_environ.DATASET_ID, DATASET_ID) + IMPLICIT_DATASET_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_DATASET_ID): + self._callFUT(None) + self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID) class Test_set_default_connection(unittest2.TestCase): @@ -134,6 +116,26 @@ def test_set_implicit(self): self.assertEqual(_implicit_environ.CONNECTION, fake_cnxn) +class Test_get_connection(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore import get_connection + return get_connection() + + def test_it(self): + from gcloud import credentials + from gcloud.datastore.connection import Connection + from gcloud.test_credentials import _Client + from gcloud._testing import _Monkey + + client = _Client() + with 
_Monkey(credentials, client=client): + found = self._callFUT() + self.assertTrue(isinstance(found, Connection)) + self.assertTrue(found._credentials is client._signed) + self.assertTrue(client._get_app_default_called) + + class Test_get_dataset(unittest2.TestCase): def _callFUT(self, dataset_id): @@ -157,45 +159,79 @@ def test_it(self): self.assertTrue(client._get_app_default_called) -class Test_implicit_behavior(unittest2.TestCase): +class Test__require_dataset_id(unittest2.TestCase): + + _MARKER = object() - def test__require_dataset_value_unset(self): - import gcloud.datastore + def _callFUT(self, passed=_MARKER): + from gcloud.datastore import _require_dataset_id + if passed is self._MARKER: + return _require_dataset_id() + return _require_dataset_id(passed) + + def _monkey(self, dataset_id): from gcloud.datastore import _implicit_environ from gcloud._testing import _Monkey + return _Monkey(_implicit_environ, DATASET_ID=dataset_id) - with _Monkey(_implicit_environ, DATASET=None): + def test__require_dataset_implicit_unset(self): + with self._monkey(None): with self.assertRaises(EnvironmentError): - gcloud.datastore._require_dataset() + self._callFUT() - def test__require_dataset_value_set(self): - import gcloud.datastore - from gcloud.datastore import _implicit_environ - from gcloud._testing import _Monkey + def test__require_dataset_implicit_unset_passed_explicitly(self): + ID = 'DATASET' + with self._monkey(None): + self.assertEqual(self._callFUT(ID), ID) + + def test__require_dataset_id_implicit_set(self): + IMPLICIT_ID = 'IMPLICIT' + with self._monkey(IMPLICIT_ID): + stored_id = self._callFUT() + self.assertTrue(stored_id is IMPLICIT_ID) - FAKE_DATASET = object() - with _Monkey(_implicit_environ, DATASET=FAKE_DATASET): - stored_dataset = gcloud.datastore._require_dataset() - self.assertTrue(stored_dataset is FAKE_DATASET) + def test__require_dataset_id_implicit_set_passed_explicitly(self): + ID = 'DATASET' + IMPLICIT_ID = 'IMPLICIT' + with 
self._monkey(IMPLICIT_ID): + self.assertEqual(self._callFUT(ID), ID) - def test__require_connection_value_unset(self): - import gcloud.datastore + +class Test_require_connection(unittest2.TestCase): + + _MARKER = object() + + def _callFUT(self, passed=_MARKER): + from gcloud.datastore import _require_connection + if passed is self._MARKER: + return _require_connection() + return _require_connection(passed) + + def _monkey(self, connection): from gcloud.datastore import _implicit_environ from gcloud._testing import _Monkey + return _Monkey(_implicit_environ, CONNECTION=connection) - with _Monkey(_implicit_environ, CONNECTION=None): + def test__require_connection_implicit_unset(self): + with self._monkey(None): with self.assertRaises(EnvironmentError): - gcloud.datastore._require_connection() + self._callFUT() - def test__require_connection_value_set(self): - import gcloud.datastore - from gcloud.datastore import _implicit_environ - from gcloud._testing import _Monkey + def test__require_connection_implicit_unset_passed_explicitly(self): + CONNECTION = object() + with self._monkey(None): + self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) + + def test__require_connection_implicit_set(self): + IMPLICIT_CONNECTION = object() + with self._monkey(IMPLICIT_CONNECTION): + self.assertTrue(self._callFUT() is IMPLICIT_CONNECTION) - FAKE_CONNECTION = object() - with _Monkey(_implicit_environ, CONNECTION=FAKE_CONNECTION): - stored_connection = gcloud.datastore._require_connection() - self.assertTrue(stored_connection is FAKE_CONNECTION) + def test__require_connection_implicit_set_passed_explicitly(self): + IMPLICIT_CONNECTION = object() + CONNECTION = object() + with self._monkey(IMPLICIT_CONNECTION): + self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) class Test_get_entities_function(unittest2.TestCase): @@ -309,7 +345,6 @@ def test_get_entities_implicit(self): from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key from 
gcloud.datastore.test_connection import _Connection - from gcloud.datastore.test_entity import _Dataset from gcloud._testing import _Monkey DATASET_ID = 'DATASET' @@ -323,10 +358,10 @@ def test_get_entities_implicit(self): # Make a connection to return the entity pb. CUSTOM_CONNECTION = _Connection(entity_pb) - CUSTOM_DATASET = _Dataset() key = Key(KIND, ID, dataset_id=DATASET_ID) - with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET, + with _Monkey(_implicit_environ, + DATASET_ID=DATASET_ID, CONNECTION=CUSTOM_CONNECTION): result, = self._callFUT([key]) @@ -375,13 +410,13 @@ def test_allocate_ids_implicit(self): from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key from gcloud.datastore.test_connection import _Connection - from gcloud.datastore.test_entity import _Dataset from gcloud._testing import _Monkey - CUSTOM_DATASET = _Dataset() + DATASET_ID = 'DATASET' CUSTOM_CONNECTION = _Connection() NUM_IDS = 2 - with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET, + with _Monkey(_implicit_environ, + DATASET_ID=DATASET_ID, CONNECTION=CUSTOM_CONNECTION): INCOMPLETE_KEY = Key('KIND') result = self._callFUT(INCOMPLETE_KEY, NUM_IDS) @@ -393,12 +428,12 @@ def test_allocate_ids_with_complete(self): from gcloud.datastore import _implicit_environ from gcloud.datastore.key import Key from gcloud.datastore.test_connection import _Connection - from gcloud.datastore.test_entity import _Dataset from gcloud._testing import _Monkey - CUSTOM_DATASET = _Dataset() + DATASET_ID = 'DATASET' CUSTOM_CONNECTION = _Connection() - with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET, + with _Monkey(_implicit_environ, + DATASET_ID=DATASET_ID, CONNECTION=CUSTOM_CONNECTION): COMPLETE_KEY = Key('KIND', 1234) self.assertRaises(ValueError, self._callFUT, diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index 4a54ce7198be..8b4f4e786dc0 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -193,16 +193,6 
@@ def get(self, connection=None): return self._stored -class _Dataset(dict): - - def __init__(self, connection=None): - super(_Dataset, self).__init__() - self._connection = connection - - def id(self): - return _DATASET_ID - - class _Connection(object): _transaction = _saved = _deleted = None _save_result = (False, None) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index beb6fd92ef4a..26c2ff992678 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -17,8 +17,9 @@ class TestKey(unittest2.TestCase): + _DEFAULT_DATASET = 'DATASET' + def setUp(self): - self._DEFAULT_DATASET = 'DATASET' from gcloud.datastore import _implicit_environ self._replaced_dataset = _implicit_environ.DATASET @@ -31,41 +32,56 @@ def tearDown(self): _implicit_environ.DATASET_ID = self._replaced_dataset_id def _getTargetClass(self): - from gcloud.datastore import _implicit_environ - from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key - - _implicit_environ.DATASET = Dataset(self._DEFAULT_DATASET) return Key def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) + def _monkeyDatasetID(self, dataset_id=_DEFAULT_DATASET): + from gcloud._testing import _Monkey + from gcloud.datastore import _implicit_environ + return _Monkey(_implicit_environ, DATASET_ID=dataset_id) + def test_ctor_empty(self): self.assertRaises(ValueError, self._makeOne) - def test_ctor_no_dataset(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ + def test_ctor_no_dataset_id(self): klass = self._getTargetClass() - with _Monkey(_implicit_environ, DATASET=None): + with self._monkeyDatasetID(None): self.assertRaises(ValueError, klass, 'KIND') + def test_ctor_w_implicit_dataset_id(self): + _DATASET = 'DATASET' + _KIND = 'KIND' + with self._monkeyDatasetID(_DATASET): + key = self._makeOne(_KIND) + self.assertEqual(key.dataset_id, _DATASET) + self.assertEqual(key.namespace, None) + 
self.assertEqual(key.kind, _KIND) + self.assertEqual(key.path, [{'kind': _KIND}]) + + def test_ctor_w_implicit_dataset_id_empty_path(self): + _DATASET = 'DATASET' + with self._monkeyDatasetID(_DATASET): + self.assertRaises(ValueError, self._makeOne) + def test_ctor_parent(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_DATASET = 'DATASET-ALT' _PARENT_NAMESPACE = 'NAMESPACE' - parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, - dataset_id=_PARENT_DATASET, - namespace=_PARENT_NAMESPACE) _CHILD_KIND = 'KIND2' _CHILD_ID = 2345 _PATH = [ {'kind': _PARENT_KIND, 'id': _PARENT_ID}, {'kind': _CHILD_KIND, 'id': _CHILD_ID}, ] - key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) + parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID, + dataset_id=_PARENT_DATASET, + namespace=_PARENT_NAMESPACE) + with self._monkeyDatasetID(): + key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key) self.assertEqual(key.dataset_id, parent_key.dataset_id) self.assertEqual(key.namespace, parent_key.namespace) self.assertEqual(key.kind, _CHILD_KIND) @@ -73,23 +89,29 @@ def test_ctor_parent(self): self.assertTrue(key.parent is parent_key) def test_ctor_partial_parent(self): - parent_key = self._makeOne('KIND') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, parent=parent_key) + with self._monkeyDatasetID(): + parent_key = self._makeOne('KIND') + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, parent=parent_key) def test_ctor_parent_bad_type(self): - with self.assertRaises(AttributeError): - self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) + with self._monkeyDatasetID(): + with self.assertRaises(AttributeError): + self._makeOne('KIND2', 1234, parent=('KIND1', 1234)) def test_ctor_parent_bad_namespace(self): - parent_key = self._makeOne('KIND', 1234, namespace='FOO') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key) + with self._monkeyDatasetID(): + parent_key = 
self._makeOne('KIND', 1234, namespace='FOO') + with self.assertRaises(ValueError): + self._makeOne( + 'KIND2', 1234, namespace='BAR', parent=parent_key) def test_ctor_parent_bad_dataset_id(self): parent_key = self._makeOne('KIND', 1234, dataset_id='FOO') - with self.assertRaises(ValueError): - self._makeOne('KIND2', 1234, dataset_id='BAR', parent=parent_key) + with self._monkeyDatasetID(): + with self.assertRaises(ValueError): + self._makeOne('KIND2', 1234, dataset_id='BAR', + parent=parent_key) def test_ctor_explicit(self): _DATASET = 'DATASET-ALT' @@ -99,19 +121,21 @@ def test_ctor_explicit(self): _PATH = [{'kind': _KIND, 'id': _ID}] key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, dataset_id=_DATASET) - self.assertNotEqual(_DATASET, self._DEFAULT_DATASET) self.assertEqual(key.dataset_id, _DATASET) self.assertEqual(key.namespace, _NAMESPACE) self.assertEqual(key.kind, _KIND) self.assertEqual(key.path, _PATH) def test_ctor_bad_kind(self): - self.assertRaises(ValueError, self._makeOne, object()) + with self._monkeyDatasetID(): + self.assertRaises(ValueError, self._makeOne, object()) def test_ctor_bad_id_or_name(self): - self.assertRaises(ValueError, self._makeOne, 'KIND', object()) - self.assertRaises(ValueError, self._makeOne, 'KIND', None) - self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None) + with self._monkeyDatasetID(): + self.assertRaises(ValueError, self._makeOne, 'KIND', object()) + self.assertRaises(ValueError, self._makeOne, 'KIND', None) + self.assertRaises(ValueError, + self._makeOne, 'KIND', 10, 'KIND2', None) def test__clone(self): _DATASET = 'DATASET-ALT' @@ -128,7 +152,8 @@ def test__clone(self): self.assertEqual(clone.path, _PATH) def test_completed_key_on_partial_w_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') _ID = 1234 new_key = key.completed_key(_ID) self.assertFalse(key is new_key) @@ -136,7 +161,8 @@ def test_completed_key_on_partial_w_id(self): 
self.assertEqual(new_key.name, None) def test_completed_key_on_partial_w_name(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') _NAME = 'NAME' new_key = key.completed_key(_NAME) self.assertFalse(key is new_key) @@ -144,17 +170,20 @@ def test_completed_key_on_partial_w_name(self): self.assertEqual(new_key.name, _NAME) def test_completed_key_on_partial_w_invalid(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertRaises(ValueError, key.completed_key, object()) def test_completed_key_on_complete(self): - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB _KIND = 'KIND' - key = self._makeOne(_KIND) + with self._monkeyDatasetID(): + key = self._makeOne(_KIND) pb = key.to_protobuf() self.assertTrue(isinstance(pb, KeyPB)) @@ -173,13 +202,15 @@ def test_to_protobuf_defaults(self): def test_to_protobuf_w_explicit_dataset(self): _DATASET = 'DATASET-ALT' - key = self._makeOne('KIND', dataset_id=_DATASET) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', dataset_id=_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.dataset_id, _DATASET) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - key = self._makeOne('KIND', namespace=_NAMESPACE) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', namespace=_NAMESPACE) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace, _NAMESPACE) @@ -188,7 +219,8 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 'NAME' - key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) pb = key.to_protobuf() elems = list(pb.path_element) self.assertEqual(len(elems), 2) @@ 
-198,7 +230,8 @@ def test_to_protobuf_w_explicit_path(self): self.assertEqual(elems[1].id, _ID) def test_to_protobuf_w_no_kind(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') # Force the 'kind' to be unset. Maybe `to_protobuf` should fail # on this? The backend certainly will. key._path[-1].pop('kind') @@ -210,7 +243,8 @@ def test_get_explicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) entity = key.get(connection=cnxn) self.assertEqual(entity, None) @@ -221,7 +255,8 @@ def test_get_implicit_connection_miss(self): cnxn_lookup_result = [] cnxn = _Connection(*cnxn_lookup_result) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with _Monkey(_implicit_environ, CONNECTION=cnxn): entity = key.get() self.assertEqual(entity, None) @@ -248,7 +283,8 @@ def test_get_explicit_connection_hit(self): cnxn = _Connection(*cnxn_lookup_result) # Create key and look-up. 
- key = self._makeOne(KIND, ID) + with self._monkeyDatasetID(): + key = self._makeOne(KIND, ID) entity = key.get(connection=cnxn) self.assertEqual(entity.items(), [('foo', 'Foo')]) self.assertTrue(entity.key is key) @@ -257,7 +293,8 @@ def test_get_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with self.assertRaises(EnvironmentError): key.get() @@ -265,7 +302,8 @@ def test_delete_explicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) result = key.delete(connection=cnxn) self.assertEqual(result, None) self.assertEqual(cnxn._called_dataset_id, self._DEFAULT_DATASET) @@ -277,7 +315,8 @@ def test_delete_implicit_connection(self): from gcloud.datastore.test_connection import _Connection cnxn = _Connection() - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with _Monkey(_implicit_environ, CONNECTION=cnxn): result = key.delete() @@ -289,62 +328,74 @@ def test_delete_no_connection(self): from gcloud.datastore import _implicit_environ self.assertEqual(_implicit_environ.CONNECTION, None) - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) with self.assertRaises(AttributeError): key.delete() def test_is_partial_no_name_or_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertTrue(key.is_partial) def test_is_partial_w_id(self): _ID = 1234 - key = self._makeOne('KIND', _ID) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _ID) self.assertFalse(key.is_partial) def test_is_partial_w_name(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME) + with self._monkeyDatasetID(): + 
key = self._makeOne('KIND', _NAME) self.assertFalse(key.is_partial) def test_id_or_name_no_name_or_id(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertEqual(key.id_or_name, None) def test_id_or_name_no_name_or_id_child(self): - key = self._makeOne('KIND1', 1234, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne('KIND1', 1234, 'KIND2') self.assertEqual(key.id_or_name, None) def test_id_or_name_w_id_only(self): _ID = 1234 - key = self._makeOne('KIND', _ID) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _ID) self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): _NAME = 'NAME' - key = self._makeOne('KIND', _NAME) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', _NAME) self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - key = self._makeOne('KIND') + with self._monkeyDatasetID(): + key = self._makeOne('KIND') self.assertEqual(key.parent, None) def test_parent_explicit_top_level(self): - key = self._makeOne('KIND', 1234) + with self._monkeyDatasetID(): + key = self._makeOne('KIND', 1234) self.assertEqual(key.parent, None) def test_parent_explicit_nested(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] - key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + with self._monkeyDatasetID(): + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') parent = key.parent self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent diff --git a/regression/datastore.py b/regression/datastore.py index 05138a961e75..55c69d6c67f9 100644 --- 
a/regression/datastore.py +++ b/regression/datastore.py @@ -27,8 +27,7 @@ from regression import populate_datastore -DATASET_ID = os.getenv('GCLOUD_TESTS_DATASET_ID') -datastore.set_default_dataset(dataset_id=DATASET_ID) +datastore.set_default_dataset_id() datastore.set_default_connection()