From 1036f8592c60a8dc7aa91e6075cb0e08877be780 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 14:13:07 -0400 Subject: [PATCH 01/11] Drop 'Dataset.query' and 'Dataset.entity' convenience APIs. Point instead to constructing Query and Entity directly. Addresses #260, but only partially, for gcloud.datastore. --- gcloud/datastore/__init__.py | 10 +++++++--- gcloud/datastore/connection.py | 3 ++- gcloud/datastore/dataset.py | 28 ---------------------------- gcloud/datastore/demo/demo.py | 15 +++++++++------ gcloud/datastore/entity.py | 9 ++------- gcloud/datastore/query.py | 18 +++--------------- gcloud/datastore/test_dataset.py | 17 ----------------- gcloud/datastore/transaction.py | 23 +++++++++++------------ 8 files changed, 34 insertions(+), 89 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index fee5a98b47b9..520e506daaed 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -7,8 +7,10 @@ ... 'long-email@googleapis.com', ... '/path/to/private.key') >>> # Then do other things... ->>> query = dataset.query().kind('EntityKind') ->>> entity = dataset.entity('EntityKind') +>>> from gcloud.datastore.entity import Entity +>>> from gcloud.datastore.query import Query +>>> query = Query('EntityKind', dataset) +>>> entity = Entity(dataset, 'EntityKind') The main concepts with this API are: @@ -80,7 +82,9 @@ def get_dataset(dataset_id, client_email, private_key_path): >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> # Now you can do things with the dataset. - >>> dataset.query().kind('TestKind').fetch() + >>> from gcloud.datastore.query import Query + >>> query = Query('TestKind', dataset) + >>> query.fetch() [...] :type dataset_id: string diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index aff322d90926..1ca1079f3ea7 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -210,9 +210,10 @@ def run_query(self, dataset_id, query_pb, namespace=None): uses this method to fetch data: >>> from gcloud import datastore + >>> from gcloud.datastore.query import Query >>> connection = datastore.get_connection(email, key_path) >>> dataset = connection.dataset('dataset-id') - >>> query = dataset.query().kind('MyKind').filter('property =', 'val') + >>> query = Query('MyKind', dataset).filter('property =', 'val') Using the `fetch`` method... diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 8ec22035c929..15e9b6b4aa5a 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -60,34 +60,6 @@ def id(self): return self._id - def query(self, *args, **kwargs): - """Create a query bound to this dataset. - - :param args: positional arguments, passed through to the Query - - :param kw: keyword arguments, passed through to the Query - - :rtype: :class:`gcloud.datastore.query.Query` - :returns: a new Query instance, bound to this dataset. - """ - # This import is here to avoid circular references. - from gcloud.datastore.query import Query - kwargs['dataset'] = self - return Query(*args, **kwargs) - - def entity(self, kind): - """Create an entity bound to this dataset. - - :type kind: string - :param kind: the "kind" of the new entity. - - :rtype: :class:`gcloud.datastore.entity.Entity` - :returns: a new Entity instance, bound to this dataset. - """ - # This import is here to avoid circular references. 
- from gcloud.datastore.entity import Entity - return Entity(dataset=self, kind=kind) - def transaction(self, *args, **kwargs): """Create a transaction bound to this dataset. diff --git a/gcloud/datastore/demo/demo.py b/gcloud/datastore/demo/demo.py index 4d511fa6cb44..e0b19da94aa1 100644 --- a/gcloud/datastore/demo/demo.py +++ b/gcloud/datastore/demo/demo.py @@ -6,10 +6,13 @@ # Let's start by importing the demo module and getting a dataset: from gcloud.datastore import demo +from gcloud.datastore.entity import Entity +from gcloud.datastore.query import Query + dataset = demo.get_dataset() # Let's create a new entity of type "Thing" and name it 'Toy': -toy = dataset.entity('Thing') +toy = Entity(dataset, 'Thing') toy.update({'name': 'Toy'}) # Now let's save it to our datastore: @@ -26,7 +29,7 @@ # Now let's try a more advanced query. # We'll start by look at all Thing entities: -query = dataset.query().kind('Thing') +query = Query('Thing', dataset) # Let's look at the first two. print query.limit(2).fetch() @@ -42,13 +45,13 @@ # (Check the official docs for explanations of what's happening here.) with dataset.transaction(): print 'Creating and savng an entity...' - thing = dataset.entity('Thing') + thing = Entity(dataset, 'Thing') thing.key(thing.key().name('foo')) thing['age'] = 10 thing.save() print 'Creating and saving another entity...' - thing2 = dataset.entity('Thing') + thing2 = Entity(dataset, 'Thing') thing2.key(thing2.key().name('bar')) thing2['age'] = 15 thing2.save() @@ -60,7 +63,7 @@ # To rollback a transaction, just call .rollback() with dataset.transaction() as t: - thing = dataset.entity('Thing') + thing = Entity(dataset, 'Thing') thing.key(thing.key().name('another')) thing.save() t.rollback() @@ -72,7 +75,7 @@ # Remember, a key won't be complete until the transaction is commited. # That is, while inside the transaction block, thing.key() will be incomplete. with dataset.transaction(): - thing = dataset.entity('Thing') + thing = Entity(dataset, 'Thing') thing.save() print thing.key() # This will be partial diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 88ecf1324ff2..86c8e7c22b84 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -39,15 +39,10 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - This can be used on its own, however it is likely easier to use - the shortcut methods provided by :class:`gcloud.datastore.dataset.Dataset` + Entities can be constructed directly, or obtained via + the "lookup" methods provided by :class:`gcloud.datastore.dataset.Dataset` such as: - - :func:`gcloud.datastore.dataset.Dataset.entity` to create a new entity. - - >>> dataset.entity('MyEntityKind') - - - :func:`gcloud.datastore.dataset.Dataset.get_entity` to retrieve an existing entity. 
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index b556e380b3bd..7e959c8c6f01 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -18,24 +18,13 @@ class Query(object): and a clone is returned whenever any part of the query is modified:: - >>> query = Query('MyKind') + >>> query = Query('MyKind', dataset) >>> limited_query = query.limit(10) >>> query.limit() == 10 False >>> limited_query.limit() == 10 True - You typically won't construct a :class:`Query` - by initializing it like ``Query('MyKind', dataset=...)`` - but instead use the helper - :func:`gcloud.datastore.dataset.Dataset.query` method - which generates a query that can be executed - without any additional work:: - - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> query = dataset.query('MyKind') - :type kind: string :param kind: The kind to query. @@ -163,13 +152,12 @@ def ancestor(self, ancestor): For example:: >>> parent_key = Key.from_path('Person', '1') - >>> query = dataset.query('Person') + >>> query = Query('Person', dataset) >>> filtered_query = query.ancestor(parent_key) If you don't have a :class:`gcloud.datastore.key.Key` but just know the path, you can provide that as well:: - >>> query = dataset.query('Person') >>> filtered_query = query.ancestor(['Person', '1']) Each call to ``.ancestor()`` returns a cloned :class:`Query`, @@ -306,7 +294,7 @@ def fetch(self, limit=None): >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> query = dataset.query('Person').filter('name =', 'Sally') + >>> query = Query('Person', dataset).filter('name =', 'Sally') >>> query.fetch() [, , ...] >>> query.fetch(1) diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py index 588499c2f41a..56cb142837be 100644 --- a/gcloud/datastore/test_dataset.py +++ b/gcloud/datastore/test_dataset.py @@ -26,23 +26,6 @@ def test_ctor_explicit(self): self.assertEqual(dataset.id(), DATASET_ID) self.assertTrue(dataset.connection() is CONNECTION) - def test_query_factory(self): - from gcloud.datastore.query import Query - DATASET_ID = 'DATASET' - dataset = self._makeOne(DATASET_ID) - query = dataset.query() - self.assertIsInstance(query, Query) - self.assertTrue(query.dataset() is dataset) - - def test_entity_factory(self): - from gcloud.datastore.entity import Entity - DATASET_ID = 'DATASET' - KIND = 'KIND' - dataset = self._makeOne(DATASET_ID) - entity = dataset.entity(KIND) - self.assertIsInstance(entity, Entity) - self.assertEqual(entity.kind(), KIND) - def test_transaction_factory(self): from gcloud.datastore.transaction import Transaction DATASET_ID = 'DATASET' diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index d20f35a686d4..60c543b1cf1b 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -42,8 +42,9 @@ class Transaction(object): That means, if you try:: + >>> from gcloud.datastore.entity import Entity >>> with dataset.transaction(): - ... entity = dataset.entity('Thing').save() + ... entity = Entity(dataset, 'Thing').save() ``entity`` won't have a complete Key until the transaction is committed. @@ -53,7 +54,7 @@ class Transaction(object): to the entity:: >>> with dataset.transaction(): - ... entity = dataset.entity('Thing') + ... entity = Entity(dataset, 'Thing').save() ... entity.save() ... assert entity.key().is_partial() # There is no ID on this key. 
>>> assert not entity.key().is_partial() # There *is* an ID. @@ -75,7 +76,7 @@ class Transaction(object): >>> transaction = dataset.transaction() >>> transaction.begin() - >>> entity = dataset.entity('Thing') + >>> entity = Entity(dataset, 'Thing') >>> entity.save() >>> if error: @@ -94,19 +95,17 @@ class Transaction(object): For example, this is perfectly valid:: - >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset.transaction(): - ... dataset.entity('Thing').save() + ... Entity(dataset, 'Thing').save() However, this **wouldn't** be acceptable:: - >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset.transaction(): - ... dataset.entity('Thing').save() - ... with dataset.transaction(): - ... dataset.entity('Thing').save() + ... Entity(dataset, 'Thing').save() + ... with dataset.transaction(): + ... Entity(dataset, 'Thing').save() Technically, it looks like the Protobuf API supports this type of pattern, however it makes the code particularly messy. @@ -116,9 +115,9 @@ class Transaction(object): >>> dataset1 = datastore.get_dataset('dataset-id', email, key_path) >>> dataset2 = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset1.transaction(): - ... dataset1.entity('Thing').save() - ... with dataset2.transaction(): - ... dataset2.entity('Thing').save() + ... Entity(dataset1, 'Thing').save() + ... with dataset2.transaction(): + ... Entity(dataset2, 'Thing').save() :type dataset: :class:`gcloud.datastore.dataset.Dataset` :param dataset: The dataset to which this :class:`Transaction` belongs. From 1c432a946d59bcf1f3e94ecd52e8f52786c611c9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 14:37:34 -0400 Subject: [PATCH 02/11] Add a factory registry to datastore._helpers. Work toward breaking cycles based on import-for-construction. --- gcloud/datastore/_helpers.py | 21 ++++++++ gcloud/datastore/test__helpers.py | 90 +++++++++++++++++++++++++++++++ 2 files changed, 111 insertions(+) diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py index 1ebba5377457..e20e96c16533 100644 --- a/gcloud/datastore/_helpers.py +++ b/gcloud/datastore/_helpers.py @@ -14,6 +14,27 @@ INT_VALUE_CHECKER = Int64ValueChecker() +class DuplicateFactory(Exception): + """Conflicting factory registration.""" + + +class InvalidFactory(Exception): + """Unknown factory invocation.""" + + +_factories = {} # registry: name -> callable + +def _register_factory(name, factory): + if _factories.get(name) not in (None, factory): + raise DuplicateFactory(name) + _factories[name] = factory + +def _invoke_factory(name, *args, **kw): + factory = _factories.get(name) + if factory is None: + raise InvalidFactory(name) + return factory(*args, **kw) + def _get_protobuf_attribute_and_value(val): """Given a value, return the protobuf attribute name and proper value. 
diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index 84ea41ea1ec0..1d6999051bde 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -1,6 +1,96 @@ import unittest2 +class _FactoryBase(object): + + def setUp(self): + from gcloud.datastore._helpers import _factories + + self._before = _factories.copy() + + def tearDown(self): + from gcloud.datastore._helpers import _factories + + _factories.clear() + _factories.update(self._before) + + +class Test__register_factory(_FactoryBase, unittest2.TestCase): + + def _callFUT(self, name, factory): + from gcloud.datastore._helpers import _register_factory + + return _register_factory(name, factory) + + def test_it(self): + from gcloud.datastore._helpers import _factories + + class _Foo(object): + pass + + self._callFUT('Foo', _Foo) + self.assertTrue(_factories['Foo'] is _Foo) + + def test_duplicate_exact(self): + from gcloud.datastore._helpers import _factories + + class _Foo(object): + pass + + self._callFUT('Foo', _Foo) + self._callFUT('Foo', _Foo) + self.assertTrue(_factories['Foo'] is _Foo) + + def test_duplicate_conflict(self): + from gcloud.datastore._helpers import DuplicateFactory + + class _Foo(object): + pass + + class _Bar(object): + pass + + self._callFUT('Foo', _Foo) + self.assertRaises(DuplicateFactory, self._callFUT, 'Foo', _Bar) + + +class Test__invoke_factory(_FactoryBase, unittest2.TestCase): + + def setUp(self): + from gcloud.datastore._helpers import _factories + + super(Test__invoke_factory, self).setUp() + self._called_with = [] + self._widget = object() + _factories['Widget'] = self._factory + + def _callFUT(self, name, *args, **kw): + from gcloud.datastore._helpers import _invoke_factory + + return _invoke_factory(name, *args, **kw) + + def _factory(self, *args, **kw): + self._called_with.append((args, kw)) + return self._widget + + def test_missing_registration(self): + from gcloud.datastore._helpers import InvalidFactory + + self.assertRaises(InvalidFactory, self._callFUT, 'Nonesuch') + + def test_wo_args_or_kw(self): + self.assertTrue(self._callFUT('Widget') is self._widget) + self.assertEqual(self._called_with, [((), {})]) + + def test_w_args(self): + self.assertTrue(self._callFUT('Widget', 'foo', 42) is self._widget) + self.assertEqual(self._called_with, [(('foo', 42), {})]) + + def test_w_kw(self): + self.assertTrue(self._callFUT('Widget', foo=42) is self._widget) + self.assertEqual(self._called_with, [((), {'foo': 42})]) + + class Test__get_protobuf_attribute_and_value(unittest2.TestCase): def _callFUT(self, val): From 8d051229ef0f559c9a7fd84ee4276c178477ad73 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 15:36:10 -0400 Subject: [PATCH 03/11] Break import cycles via a factory registry. Note that some classes have more than one factory (class methodsd like 'from_protobuf' and 'from_path'). Each is registered under a separate name. Addresses #260 (for gcloud.datastore). 
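For reviewers, here is a minimal standalone sketch of the pattern these
changes introduce. It is an illustration only, not code from this patch;
the real helpers ('_register_factory', '_invoke_factory') and the real
registrations are in the diffs below, and the stand-in Entity class here
is hypothetical.

    # Sketch: each module registers its constructors at import time, and
    # consumers build objects by name instead of importing the defining
    # module, which is what breaks the import cycles.
    _factories = {}  # registry: name -> callable

    def _register_factory(name, factory):
        _factories[name] = factory

    def _invoke_factory(name, *args, **kw):
        return _factories[name](*args, **kw)

    class Entity(dict):
        """Stand-in for gcloud.datastore.entity.Entity."""
        def __init__(self, dataset=None, kind=None):
            super(Entity, self).__init__()
            self.dataset, self.kind = dataset, kind

    _register_factory('Entity', Entity)

    # e.g. dataset.py can now construct entities without importing entity.py:
    toy = _invoke_factory('Entity', None, 'Thing')
    assert isinstance(toy, Entity)
    assert toy.kind == 'Thing'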
--- gcloud/datastore/__init__.py | 7 +++++ gcloud/datastore/_helpers.py | 38 +++++++++++++++--------- gcloud/datastore/dataset.py | 15 +++++----- gcloud/datastore/entity.py | 17 +++++------ gcloud/datastore/key.py | 7 +++++ gcloud/datastore/query.py | 12 +++++--- gcloud/datastore/test__helpers.py | 48 ++++++++++++++++++++++++------- gcloud/datastore/transaction.py | 5 ++++ regression/datastore.py | 7 +++-- 9 files changed, 110 insertions(+), 46 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 520e506daaed..d27b8715991a 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -34,6 +34,13 @@ which represents a lookup or search over the rows in the datastore. """ +import gcloud.datastore.connection +import gcloud.datastore.dataset +import gcloud.datastore.entity +import gcloud.datastore.key +import gcloud.datastore.query +import gcloud.datastore.transaction + __version__ = '0.1.2' SCOPE = ('https://www.googleapis.com/auth/datastore ', diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py index e20e96c16533..be392bdfd086 100644 --- a/gcloud/datastore/_helpers.py +++ b/gcloud/datastore/_helpers.py @@ -8,8 +8,6 @@ from google.protobuf.internal.type_checkers import Int64ValueChecker import pytz -from gcloud.datastore.entity import Entity -from gcloud.datastore.key import Key INT_VALUE_CHECKER = Int64ValueChecker() @@ -22,18 +20,28 @@ class InvalidFactory(Exception): """Unknown factory invocation.""" -_factories = {} # registry: name -> callable +_FACTORIES = {} # registry: name -> callable + def _register_factory(name, factory): - if _factories.get(name) not in (None, factory): + """Register a factory by name.""" + if _FACTORIES.get(name) not in (None, factory): raise DuplicateFactory(name) - _factories[name] = factory + _FACTORIES[name] = factory -def _invoke_factory(name, *args, **kw): - factory = _factories.get(name) + +def _query_factory(name): + """Look up a factory by name.""" + factory = _FACTORIES.get(name) if factory is None: raise InvalidFactory(name) - return factory(*args, **kw) + return factory + + +def _invoke_factory(name, *args, **kw): + """Look up and call a factory by name.""" + return _query_factory(name)(*args, **kw) + def _get_protobuf_attribute_and_value(val): """Given a value, return the protobuf attribute name and proper value. @@ -68,6 +76,8 @@ def _get_protobuf_attribute_and_value(val): :returns: A tuple of the attribute name and proper value type. """ + key_class = _FACTORIES.get('Key') + entity_class = _FACTORIES.get('Entity') if isinstance(val, datetime.datetime): name = 'timestamp_microseconds' @@ -79,8 +89,6 @@ def _get_protobuf_attribute_and_value(val): val = val.astimezone(pytz.utc) # Convert the datetime to a microsecond timestamp. 
value = long(calendar.timegm(val.timetuple()) * 1e6) + val.microsecond - elif isinstance(val, Key): - name, value = 'key', val.to_protobuf() elif isinstance(val, bool): name, value = 'boolean', val elif isinstance(val, float): @@ -92,10 +100,12 @@ def _get_protobuf_attribute_and_value(val): name, value = 'string', val elif isinstance(val, (bytes, str)): name, value = 'blob', val - elif isinstance(val, Entity): - name, value = 'entity', val elif isinstance(val, list): name, value = 'list', val + elif key_class and isinstance(val, key_class): + name, value = 'key', val.to_protobuf() + elif entity_class and isinstance(val, entity_class): + name, value = 'entity', val else: raise ValueError("Unknown protobuf attr type %s" % type(val)) @@ -126,7 +136,7 @@ def _get_value_from_value_pb(value_pb): result = naive.replace(tzinfo=pytz.utc) elif value_pb.HasField('key_value'): - result = Key.from_protobuf(value_pb.key_value) + result = _FACTORIES['Key'].from_protobuf(value_pb.key_value) elif value_pb.HasField('boolean_value'): result = value_pb.boolean_value @@ -144,7 +154,7 @@ def _get_value_from_value_pb(value_pb): result = value_pb.blob_value elif value_pb.HasField('entity_value'): - result = Entity.from_protobuf(value_pb.entity_value) + result = _FACTORIES['Entity'].from_protobuf(value_pb.entity_value) elif value_pb.list_value: result = [_get_value_from_value_pb(x) for x in value_pb.list_value] diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 15e9b6b4aa5a..2100098c8de8 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -1,5 +1,7 @@ """Create / interact with gcloud datastore datasets.""" +from . import _helpers + class Dataset(object): """A dataset in the Cloud Datastore. @@ -70,10 +72,8 @@ def transaction(self, *args, **kwargs): :rtype: :class:`gcloud.datastore.transaction.Transaction` :returns: a new Transaction instance, bound to this dataset. """ - # This import is here to avoid circular references. - from gcloud.datastore.transaction import Transaction kwargs['dataset'] = self - return Transaction(*args, **kwargs) + return _helpers._invoke_factory('Transaction', *args, **kwargs) def get_entity(self, key): """Retrieves entity from the dataset, along with its attributes. @@ -97,9 +97,6 @@ def get_entities(self, keys): :rtype: list of :class:`gcloud.datastore.entity.Entity` :return: The requested entities. """ - # This import is here to avoid circular references. - from gcloud.datastore.entity import Entity - entity_pbs = self.connection().lookup( dataset_id=self.id(), key_pbs=[k.to_protobuf() for k in keys] @@ -107,5 +104,9 @@ def get_entities(self, keys): entities = [] for entity_pb in entity_pbs: - entities.append(Entity.from_protobuf(entity_pb, dataset=self)) + entities.append(_helpers._invoke_factory('Entity_pb', + entity_pb, dataset=self)) return entities + + +_helpers._register_factory('Dataset', Dataset) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 86c8e7c22b84..b0b5d0082415 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -16,7 +16,8 @@ """ from gcloud.datastore import datastore_v1_pb2 as datastore_pb -from gcloud.datastore.key import Key + +from . 
import _helpers class NoKey(RuntimeError): @@ -69,7 +70,7 @@ def __init__(self, dataset=None, kind=None): super(Entity, self).__init__() self._dataset = dataset if kind: - self._key = Key().kind(kind) + self._key = _helpers._invoke_factory('Key').kind(kind) else: self._key = None @@ -154,11 +155,7 @@ def from_protobuf(cls, pb, dataset=None): :returns: The :class:`Entity` derived from the :class:`gcloud.datastore.datastore_v1_pb2.Entity`. """ - - # This is here to avoid circular imports. - from gcloud.datastore import _helpers - - key = Key.from_protobuf(pb.key) + key = _helpers._invoke_factory('Key_from_protobuf', pb.key) entity = cls.from_key(key, dataset) for property_pb in pb.property: @@ -243,7 +240,7 @@ def save(self): transaction.add_auto_id_entity(self) if isinstance(key_pb, datastore_pb.Key): - updated_key = Key.from_protobuf(key_pb) + updated_key = _helpers._invoke_factory('Key_from_protobuf', key_pb) # Update the path (which may have been altered). self._key = key.path(updated_key.path()) @@ -270,3 +267,7 @@ def __repr__(self): super(Entity, self).__repr__()) else: return '' % (super(Entity, self).__repr__()) + + +_helpers._register_factory('Entity', Entity) +_helpers._register_factory('Entity_pb', Entity.from_protobuf) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index be28c4db42e7..059ba414e063 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -5,6 +5,8 @@ from gcloud.datastore import datastore_v1_pb2 as datastore_pb +from . import _helpers + class Key(object): """An immutable representation of a datastore Key. @@ -251,3 +253,8 @@ def parent(self): def __repr__(self): return '' % self.path() + + +_helpers._register_factory('Key', Key) +_helpers._register_factory('Key_from_protobuf', Key.from_protobuf) +_helpers._register_factory('Key_path', Key.from_path) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 7e959c8c6f01..2f917ba57d3d 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -3,10 +3,10 @@ import base64 from gcloud.datastore import datastore_v1_pb2 as datastore_pb -from gcloud.datastore import _helpers -from gcloud.datastore.entity import Entity from gcloud.datastore.key import Key +from . import _helpers + class Query(object): """A Query against the Cloud Datastore. @@ -191,7 +191,7 @@ def ancestor(self, ancestor): # If a list was provided, turn it into a Key. if isinstance(ancestor, list): - ancestor = Key.from_path(*ancestor) + ancestor = _helpers._invoke_factory('Key_path', *ancestor) # If we don't have a Key value by now, something is wrong. 
if not isinstance(ancestor, Key): @@ -324,7 +324,8 @@ def fetch(self, limit=None): entity_pbs, end_cursor = query_results[:2] self._cursor = end_cursor - return [Entity.from_protobuf(entity, dataset=self.dataset()) + return [_helpers._invoke_factory('Entity_pb', entity, + dataset=self.dataset()) for entity in entity_pbs] def cursor(self): @@ -392,3 +393,6 @@ def order(self, *properties): property_order.direction = property_order.ASCENDING return clone + + +_helpers._register_factory('Query', Query) diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index 1d6999051bde..cc1ccba76df7 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -4,15 +4,15 @@ class _FactoryBase(object): def setUp(self): - from gcloud.datastore._helpers import _factories + from gcloud.datastore._helpers import _FACTORIES - self._before = _factories.copy() + self._before = _FACTORIES.copy() def tearDown(self): - from gcloud.datastore._helpers import _factories + from gcloud.datastore._helpers import _FACTORIES - _factories.clear() - _factories.update(self._before) + _FACTORIES.clear() + _FACTORIES.update(self._before) class Test__register_factory(_FactoryBase, unittest2.TestCase): @@ -23,23 +23,23 @@ def _callFUT(self, name, factory): return _register_factory(name, factory) def test_it(self): - from gcloud.datastore._helpers import _factories + from gcloud.datastore._helpers import _FACTORIES class _Foo(object): pass self._callFUT('Foo', _Foo) - self.assertTrue(_factories['Foo'] is _Foo) + self.assertTrue(_FACTORIES['Foo'] is _Foo) def test_duplicate_exact(self): - from gcloud.datastore._helpers import _factories + from gcloud.datastore._helpers import _FACTORIES class _Foo(object): pass self._callFUT('Foo', _Foo) self._callFUT('Foo', _Foo) - self.assertTrue(_factories['Foo'] is _Foo) + self.assertTrue(_FACTORIES['Foo'] is _Foo) def test_duplicate_conflict(self): from gcloud.datastore._helpers import DuplicateFactory @@ -54,15 +54,41 @@ class _Bar(object): self.assertRaises(DuplicateFactory, self._callFUT, 'Foo', _Bar) +class Test__query_factory(_FactoryBase, unittest2.TestCase): + + def setUp(self): + from gcloud.datastore._helpers import _FACTORIES + + super(Test__query_factory, self).setUp() + _FACTORIES['Widget'] = self._factory + + def _callFUT(self, name, *args, **kw): + from gcloud.datastore._helpers import _query_factory + + return _query_factory(name, *args, **kw) + + @staticmethod + def _factory(*args, **kw): # pragma: NO COVER + pass + + def test_miss(self): + from gcloud.datastore._helpers import InvalidFactory + + self.assertRaises(InvalidFactory, self._callFUT, 'Nonesuch') + + def test_hit(self): + self.assertTrue(self._callFUT('Widget') is self._factory) + + class Test__invoke_factory(_FactoryBase, unittest2.TestCase): def setUp(self): - from gcloud.datastore._helpers import _factories + from gcloud.datastore._helpers import _FACTORIES super(Test__invoke_factory, self).setUp() self._called_with = [] self._widget = object() - _factories['Widget'] = self._factory + _FACTORIES['Widget'] = self._factory def _callFUT(self, name, *args, **kw): from gcloud.datastore._helpers import _invoke_factory diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index 60c543b1cf1b..446bf3a2c457 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -3,6 +3,8 @@ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore.key import Key +from . 
import _helpers + class Transaction(object): """An abstraction representing datastore Transactions. @@ -253,3 +255,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() else: self.rollback() + + +_helpers._register_factory('Transaction', Transaction) diff --git a/regression/datastore.py b/regression/datastore.py index 047b6f586dfe..4a4f42ac5c3b 100644 --- a/regression/datastore.py +++ b/regression/datastore.py @@ -30,6 +30,7 @@ def _get_dataset(self): return self._datasets[self._dataset_id] def _get_post(self, name=None, key_id=None, post_content=None): + from gcloud.datastore.entity import Entity post_content = post_content or { 'title': 'How to make the perfect pizza in your grill', 'tags': ['pizza', 'grill'], @@ -44,7 +45,7 @@ def _get_post(self, name=None, key_id=None, post_content=None): } # Create an entity with the given content in our dataset. dataset = self._get_dataset() - entity = dataset.entity(kind='Post') + entity = Entity(dataset, 'Post') entity.update(post_content) # Update the entity key. @@ -114,5 +115,7 @@ def test_save_multiple(self): self.assertEqual(len(matches), 2) def test_empty_kind(self): - posts = self._get_dataset().query().kind('Post').limit(2).fetch() + from gcloud.datastore.query import Query + dataset = self._get_dataset() + posts = Query(dataset=dataset).kind('Post').limit(2).fetch() self.assertEqual(posts, []) From 3c1d516d1bb38a5df7301c2c4c144623340dfded Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 15:46:22 -0400 Subject: [PATCH 04/11] Tidy local imports, remove remaining cycles. --- gcloud/datastore/entity.py | 3 +-- gcloud/datastore/key.py | 3 +-- gcloud/datastore/query.py | 6 ++---- gcloud/datastore/transaction.py | 8 ++++---- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index b0b5d0082415..72c93b2a2063 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -15,8 +15,7 @@ delete or persist the data stored on the entity. """ -from gcloud.datastore import datastore_v1_pb2 as datastore_pb - +from . import datastore_v1_pb2 as datastore_pb from . import _helpers diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 059ba414e063..cf4d560b1ae6 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -3,8 +3,7 @@ import copy from itertools import izip -from gcloud.datastore import datastore_v1_pb2 as datastore_pb - +from . import datastore_v1_pb2 as datastore_pb from . import _helpers diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 2f917ba57d3d..4a10a7e9a364 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -2,9 +2,7 @@ import base64 -from gcloud.datastore import datastore_v1_pb2 as datastore_pb -from gcloud.datastore.key import Key - +from . import datastore_v1_pb2 as datastore_pb from . import _helpers @@ -194,7 +192,7 @@ def ancestor(self, ancestor): ancestor = _helpers._invoke_factory('Key_path', *ancestor) # If we don't have a Key value by now, something is wrong. - if not isinstance(ancestor, Key): + if not isinstance(ancestor, _helpers._query_factory('Key')): raise TypeError('Expected list or Key, got %s.' % type(ancestor)) # Get the composite filter and add a new property filter. 
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index 446bf3a2c457..93833bd70ad1 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -1,8 +1,6 @@ """Create / interact with gcloud datastore transactions.""" -from gcloud.datastore import datastore_v1_pb2 as datastore_pb -from gcloud.datastore.key import Key - +from . import datastore_v1_pb2 as datastore_pb from . import _helpers @@ -237,7 +235,9 @@ def commit(self): # For any of the auto-id entities, make sure we update their keys. for i, entity in enumerate(self._auto_id_entities): key_pb = result.insert_auto_id_key[i] - key = Key.from_protobuf(key_pb) + # We can ignore the 'dataset' parameter ecause we are only + # creating a transient instance in order to copy its path. + key = _helpers._invoke_factory('Key', key_pb) entity.key(entity.key().path(key.path())) # Tell the connection that the transaction is over. From cf29c90145af770936d9361517fa772d75e7fb76 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 16:20:32 -0400 Subject: [PATCH 05/11] Fix build: invoke correct factory when unmarshalling key from pb. --- gcloud/datastore/transaction.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index 93833bd70ad1..f0f0e0d1ad30 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -237,7 +237,7 @@ def commit(self): key_pb = result.insert_auto_id_key[i] # We can ignore the 'dataset' parameter ecause we are only # creating a transient instance in order to copy its path. - key = _helpers._invoke_factory('Key', key_pb) + key = _helpers._invoke_factory('Key_pb', key_pb) entity.key(entity.key().path(key.path())) # Tell the connection that the transaction is over. From 31f60d8169c2d8b121ca57b175ea846f3f2e3f74 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 16:27:07 -0400 Subject: [PATCH 06/11] Reorder 'Query.__init__' paramters to match Entity. --- gcloud/datastore/__init__.py | 4 +-- gcloud/datastore/connection.py | 2 +- gcloud/datastore/demo/demo.py | 2 +- gcloud/datastore/query.py | 2 +- gcloud/datastore/test_connection.py | 4 +-- gcloud/datastore/test_query.py | 42 ++++++++++++++--------------- 6 files changed, 28 insertions(+), 28 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index d27b8715991a..fcefe356a3a9 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -9,7 +9,7 @@ >>> # Then do other things... >>> from gcloud.datastore.entity import Entity >>> from gcloud.datastore.query import Query ->>> query = Query('EntityKind', dataset) +>>> query = Query(dataset, 'EntityKind') >>> entity = Entity(dataset, 'EntityKind') The main concepts with this API are: @@ -90,7 +90,7 @@ def get_dataset(dataset_id, client_email, private_key_path): >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> # Now you can do things with the dataset. >>> from gcloud.datastore.query import Query - >>> query = Query('TestKind', dataset) + >>> query = Query(dataset, 'TestKind') >>> query.fetch() [...] 
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 1ca1079f3ea7..5c158f32fd75 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -213,7 +213,7 @@ def run_query(self, dataset_id, query_pb, namespace=None): >>> from gcloud.datastore.query import Query >>> connection = datastore.get_connection(email, key_path) >>> dataset = connection.dataset('dataset-id') - >>> query = Query('MyKind', dataset).filter('property =', 'val') + >>> query = Query(dataset, 'MyKind').filter('property =', 'val') Using the `fetch`` method... diff --git a/gcloud/datastore/demo/demo.py b/gcloud/datastore/demo/demo.py index e0b19da94aa1..67eaf11ba2a4 100644 --- a/gcloud/datastore/demo/demo.py +++ b/gcloud/datastore/demo/demo.py @@ -29,7 +29,7 @@ # Now let's try a more advanced query. # We'll start by look at all Thing entities: -query = Query('Thing', dataset) +query = Query(dataset, 'Thing') # Let's look at the first two. print query.limit(2).fetch() diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 4a10a7e9a364..4c5aedbf3926 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -42,7 +42,7 @@ class Query(object): } """Mapping of operator strings and their protobuf equivalents.""" - def __init__(self, kind=None, dataset=None, namespace=None): + def __init__(self, dataset=None, kind=None, namespace=None): self._dataset = dataset self._namespace = namespace self._pb = datastore_pb.Query() diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index e63aae1e6449..a286255ee291 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -313,7 +313,7 @@ def test_run_query_wo_namespace_empty_result(self): DATASET_ID = 'DATASET' KIND = 'Nonesuch' - q_pb = Query(KIND, DATASET_ID).to_protobuf() + q_pb = Query(kind=KIND).to_protobuf() rsp_pb = datastore_pb.RunQueryResponse() conn = self._makeOne() URI = '/'.join([ @@ -349,7 +349,7 @@ def test_run_query_w_namespace_nonempty_result(self): DATASET_ID = 'DATASET' KIND = 'Kind' entity = datastore_pb.Entity() - q_pb = Query(KIND, DATASET_ID).to_protobuf() + q_pb = Query(kind=KIND).to_protobuf() rsp_pb = datastore_pb.RunQueryResponse() rsp_pb.batch.entity_result.add(entity=entity) rsp_pb.batch.entity_result_type = 1 # FULL diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index eeac85676e96..15cef3585b53 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -8,8 +8,8 @@ def _getTargetClass(self): return Query - def _makeOne(self, kind=None, dataset=None, namespace=None): - return self._getTargetClass()(kind, dataset, namespace) + def _makeOne(self, dataset=None, kind=None, namespace=None): + return self._getTargetClass()(dataset, kind, namespace) def test_ctor_defaults(self): query = self._getTargetClass()() @@ -25,7 +25,7 @@ def test_ctor_explicit(self): _KIND = 'KIND' _NAMESPACE = 'NAMESPACE' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND, dataset, _NAMESPACE) + query = self._makeOne(dataset, _KIND, _NAMESPACE) self.assertTrue(query.dataset() is dataset) kq_pb, = list(query.kind()) self.assertEqual(kq_pb.name, _KIND) @@ -39,7 +39,7 @@ def test__clone(self): _CURSOR = 'DEADBEEF' _NAMESPACE = 'NAMESPACE' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND, dataset, _NAMESPACE) + query = self._makeOne(dataset, _KIND, _NAMESPACE) query._cursor = _CURSOR clone = query._clone() self.assertFalse(clone is query) @@ -58,7 +58,7 @@ def 
test_to_protobuf_empty(self): def test_to_protobuf_w_kind(self): _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(kind=_KIND) q_pb = query.to_protobuf() kq_pb, = list(q_pb.kind) self.assertEqual(kq_pb.name, _KIND) @@ -189,7 +189,7 @@ def test_kind_setter_wo_existing(self): _DATASET = 'DATASET' _KIND = 'KIND' dataset = Dataset(_DATASET) - query = self._makeOne(dataset=dataset) + query = self._makeOne(dataset) after = query.kind(_KIND) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -203,7 +203,7 @@ def test_kind_setter_w_existing(self): _KIND_BEFORE = 'KIND_BEFORE' _KIND_AFTER = 'KIND_AFTER' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND_BEFORE, dataset) + query = self._makeOne(dataset, _KIND_BEFORE) after = query.kind(_KIND_AFTER) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -218,7 +218,7 @@ def test_limit_setter_wo_existing(self): _KIND = 'KIND' _LIMIT = 42 dataset = Dataset(_DATASET) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.limit(_LIMIT) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -232,7 +232,7 @@ def test_dataset_setter(self): _DATASET = 'DATASET' _KIND = 'KIND' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND) + query = self._makeOne(kind=_KIND) after = query.dataset(dataset) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -254,7 +254,7 @@ def test_fetch_default_limit(self): prop.value.string_value = u'Foo' connection = _Connection(entity_pb) dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) entities = query.fetch() self.assertEqual(len(entities), 1) self.assertEqual(entities[0].key().path(), @@ -283,7 +283,7 @@ def test_fetch_explicit_limit(self): connection = _Connection(entity_pb) connection._cursor = _CURSOR dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset, _NAMESPACE) + query = self._makeOne(dataset, _KIND, _NAMESPACE) limited = query.limit(13) entities = query.fetch(13) self.assertEqual(query._cursor, _CURSOR) @@ -302,7 +302,7 @@ def test_cursor_not_fetched(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) self.assertRaises(RuntimeError, query.cursor) def test_cursor_fetched(self): @@ -312,7 +312,7 @@ def test_cursor_fetched(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) query._cursor = _CURSOR self.assertEqual(query.cursor(), base64.b64encode(_CURSOR)) @@ -321,7 +321,7 @@ def test_with_cursor_neither(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) self.assertTrue(query.with_cursor(None) is query) def test_with_cursor_w_start(self): @@ -332,7 +332,7 @@ def test_with_cursor_w_start(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.with_cursor(_CURSOR_B64) self.assertFalse(after is query) q_pb = after.to_protobuf() @@ -347,7 +347,7 @@ def test_with_cursor_w_end(self): _KIND = 'KIND' connection = _Connection() 
dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.with_cursor(None, _CURSOR_B64) self.assertFalse(after is query) q_pb = after.to_protobuf() @@ -364,7 +364,7 @@ def test_with_cursor_w_both(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.with_cursor(_START_B64, _END_B64) self.assertFalse(after is query) q_pb = after.to_protobuf() @@ -373,7 +373,7 @@ def test_with_cursor_w_both(self): def test_order_empty(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order() self.assertFalse(after is before) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -381,7 +381,7 @@ def test_order_empty(self): def test_order_single_asc(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order('field') after_pb = after.to_protobuf() order_pb = list(after_pb.order) @@ -392,7 +392,7 @@ def test_order_single_asc(self): def test_order_single_desc(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order('-field') after_pb = after.to_protobuf() order_pb = list(after_pb.order) @@ -403,7 +403,7 @@ def test_order_single_desc(self): def test_order_multiple(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order('foo', '-bar') after_pb = after.to_protobuf() order_pb = list(after_pb.order) From 3a62db9e4c8975d8747a5a6e469700069ad28186 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Oct 2014 19:06:57 -0400 Subject: [PATCH 07/11] Rename '_query_factory' helper -> '_get_factory' to avoid confustion. Incorporates feedback from @dhermees. --- gcloud/datastore/_helpers.py | 4 ++-- gcloud/datastore/query.py | 2 +- gcloud/datastore/test__helpers.py | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py index be392bdfd086..24ca36fd6b9d 100644 --- a/gcloud/datastore/_helpers.py +++ b/gcloud/datastore/_helpers.py @@ -30,7 +30,7 @@ def _register_factory(name, factory): _FACTORIES[name] = factory -def _query_factory(name): +def _get_factory(name): """Look up a factory by name.""" factory = _FACTORIES.get(name) if factory is None: @@ -40,7 +40,7 @@ def _query_factory(name): def _invoke_factory(name, *args, **kw): """Look up and call a factory by name.""" - return _query_factory(name)(*args, **kw) + return _get_factory(name)(*args, **kw) def _get_protobuf_attribute_and_value(val): diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 4c5aedbf3926..6406057eb356 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -192,7 +192,7 @@ def ancestor(self, ancestor): ancestor = _helpers._invoke_factory('Key_path', *ancestor) # If we don't have a Key value by now, something is wrong. - if not isinstance(ancestor, _helpers._query_factory('Key')): + if not isinstance(ancestor, _helpers._get_factory('Key')): raise TypeError('Expected list or Key, got %s.' % type(ancestor)) # Get the composite filter and add a new property filter. 
diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index cc1ccba76df7..d799fa4ad035 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -54,18 +54,18 @@ class _Bar(object): self.assertRaises(DuplicateFactory, self._callFUT, 'Foo', _Bar) -class Test__query_factory(_FactoryBase, unittest2.TestCase): +class Test__get_factory(_FactoryBase, unittest2.TestCase): def setUp(self): from gcloud.datastore._helpers import _FACTORIES - super(Test__query_factory, self).setUp() + super(Test__get_factory, self).setUp() _FACTORIES['Widget'] = self._factory def _callFUT(self, name, *args, **kw): - from gcloud.datastore._helpers import _query_factory + from gcloud.datastore._helpers import _get_factory - return _query_factory(name, *args, **kw) + return _get_factory(name, *args, **kw) @staticmethod def _factory(*args, **kw): # pragma: NO COVER From daf889c52b70a87c03328050163e7b16fe7842fd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 21 Oct 2014 00:09:00 -0400 Subject: [PATCH 08/11] Avoid relative imports. Incorporate feedback from @dhermes. --- gcloud/datastore/dataset.py | 2 +- gcloud/datastore/entity.py | 4 ++-- gcloud/datastore/key.py | 4 ++-- gcloud/datastore/query.py | 4 ++-- gcloud/datastore/transaction.py | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 2100098c8de8..1c6c850d20cd 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -1,6 +1,6 @@ """Create / interact with gcloud datastore datasets.""" -from . import _helpers +from gcloud.datastore import _helpers class Dataset(object): diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 72c93b2a2063..1911cb42fea1 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -15,8 +15,8 @@ delete or persist the data stored on the entity. """ -from . import datastore_v1_pb2 as datastore_pb -from . import _helpers +from gcloud.datastore import datastore_v1_pb2 as datastore_pb +from gcloud.datastore import _helpers class NoKey(RuntimeError): diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index cf4d560b1ae6..7d6cd3a70692 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -3,8 +3,8 @@ import copy from itertools import izip -from . import datastore_v1_pb2 as datastore_pb -from . import _helpers +from gcloud.datastore import datastore_v1_pb2 as datastore_pb +from gcloud.datastore import _helpers class Key(object): diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 6406057eb356..5d5c80babf30 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -2,8 +2,8 @@ import base64 -from . import datastore_v1_pb2 as datastore_pb -from . import _helpers +from gcloud.datastore import datastore_v1_pb2 as datastore_pb +from gcloud.datastore import _helpers class Query(object): diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index f0f0e0d1ad30..ce766e21e16c 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -1,7 +1,7 @@ """Create / interact with gcloud datastore transactions.""" -from . import datastore_v1_pb2 as datastore_pb -from . 
import _helpers +from gcloud.datastore import datastore_v1_pb2 as datastore_pb +from gcloud.datastore import _helpers class Transaction(object): From a2c8bcb618b94410ea4baa9065d4498e0bd0b095 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 21 Oct 2014 00:09:53 -0400 Subject: [PATCH 09/11] Encapsulate factory registry as a singleton object. Use its methods, rather than free functions. --- gcloud/datastore/__init__.py | 1 + gcloud/datastore/_helpers.py | 46 +++++++----- gcloud/datastore/dataset.py | 8 +- gcloud/datastore/entity.py | 11 +-- gcloud/datastore/key.py | 6 +- gcloud/datastore/query.py | 10 +-- gcloud/datastore/test__helpers.py | 121 ++++++++++++------------------ gcloud/datastore/transaction.py | 4 +- 8 files changed, 96 insertions(+), 111 deletions(-) diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index fcefe356a3a9..5391092caba8 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -34,6 +34,7 @@ which represents a lookup or search over the rows in the datastore. """ +# import submodules which register factories. import gcloud.datastore.connection import gcloud.datastore.dataset import gcloud.datastore.entity diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py index 24ca36fd6b9d..16bb552177f5 100644 --- a/gcloud/datastore/_helpers.py +++ b/gcloud/datastore/_helpers.py @@ -20,27 +20,39 @@ class InvalidFactory(Exception): """Unknown factory invocation.""" -_FACTORIES = {} # registry: name -> callable +class _FactoryRegistry(object): + """Single registry for named factories. + This registry provides an API for modules to instantiate objects from + other modules without needing to import them directly, allowing us + to break circular import chains. + """ + + _MARKER = object() + + def __init__(self): + self._registered = {} -def _register_factory(name, factory): - """Register a factory by name.""" - if _FACTORIES.get(name) not in (None, factory): - raise DuplicateFactory(name) - _FACTORIES[name] = factory + def register(self, name, factory): + """Register a factory by name.""" + if self._registered.get(name) not in (None, factory): + raise DuplicateFactory(name) + self._registered[name] = factory + def get(self, name, default=_MARKER): + """Look up a factory by name.""" + found = self._registered.get(name, default) + if found is self._MARKER: + raise InvalidFactory(name) + return found -def _get_factory(name): - """Look up a factory by name.""" - factory = _FACTORIES.get(name) - if factory is None: - raise InvalidFactory(name) - return factory + def invoke(self, name, *args, **kw): + """Look up and call a factory by name.""" + return self.get(name)(*args, **kw) -def _invoke_factory(name, *args, **kw): - """Look up and call a factory by name.""" - return _get_factory(name)(*args, **kw) +_FACTORIES = _FactoryRegistry() # singleton +del _FactoryRegistry def _get_protobuf_attribute_and_value(val): @@ -136,7 +148,7 @@ def _get_value_from_value_pb(value_pb): result = naive.replace(tzinfo=pytz.utc) elif value_pb.HasField('key_value'): - result = _FACTORIES['Key'].from_protobuf(value_pb.key_value) + result = _FACTORIES.get('Key_from_protobuf')(value_pb.key_value) elif value_pb.HasField('boolean_value'): result = value_pb.boolean_value @@ -154,7 +166,7 @@ def _get_value_from_value_pb(value_pb): result = value_pb.blob_value elif value_pb.HasField('entity_value'): - result = _FACTORIES['Entity'].from_protobuf(value_pb.entity_value) + result = _FACTORIES.get('Entity_pb')(value_pb.entity_value) elif 
value_pb.list_value: result = [_get_value_from_value_pb(x) for x in value_pb.list_value] diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 1c6c850d20cd..677a50ac560e 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -73,7 +73,7 @@ def transaction(self, *args, **kwargs): :returns: a new Transaction instance, bound to this dataset. """ kwargs['dataset'] = self - return _helpers._invoke_factory('Transaction', *args, **kwargs) + return _helpers._FACTORIES.invoke('Transaction', *args, **kwargs) def get_entity(self, key): """Retrieves entity from the dataset, along with its attributes. @@ -104,9 +104,9 @@ def get_entities(self, keys): entities = [] for entity_pb in entity_pbs: - entities.append(_helpers._invoke_factory('Entity_pb', - entity_pb, dataset=self)) + entities.append(_helpers._FACTORIES.invoke( + 'Entity_pb', entity_pb, dataset=self)) return entities -_helpers._register_factory('Dataset', Dataset) +_helpers._FACTORIES.register('Dataset', Dataset) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 1911cb42fea1..b610fbbb575d 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -69,7 +69,7 @@ def __init__(self, dataset=None, kind=None): super(Entity, self).__init__() self._dataset = dataset if kind: - self._key = _helpers._invoke_factory('Key').kind(kind) + self._key = _helpers._FACTORIES.invoke('Key').kind(kind) else: self._key = None @@ -154,7 +154,7 @@ def from_protobuf(cls, pb, dataset=None): :returns: The :class:`Entity` derived from the :class:`gcloud.datastore.datastore_v1_pb2.Entity`. """ - key = _helpers._invoke_factory('Key_from_protobuf', pb.key) + key = _helpers._FACTORIES.invoke('Key_from_protobuf', pb.key) entity = cls.from_key(key, dataset) for property_pb in pb.property: @@ -239,7 +239,8 @@ def save(self): transaction.add_auto_id_entity(self) if isinstance(key_pb, datastore_pb.Key): - updated_key = _helpers._invoke_factory('Key_from_protobuf', key_pb) + updated_key = _helpers._FACTORIES.invoke( + 'Key_from_protobuf', key_pb) # Update the path (which may have been altered). self._key = key.path(updated_key.path()) @@ -268,5 +269,5 @@ def __repr__(self): return '' % (super(Entity, self).__repr__()) -_helpers._register_factory('Entity', Entity) -_helpers._register_factory('Entity_pb', Entity.from_protobuf) +_helpers._FACTORIES.register('Entity', Entity) +_helpers._FACTORIES.register('Entity_pb', Entity.from_protobuf) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 7d6cd3a70692..f18d4ef96dd6 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -254,6 +254,6 @@ def __repr__(self): return '' % self.path() -_helpers._register_factory('Key', Key) -_helpers._register_factory('Key_from_protobuf', Key.from_protobuf) -_helpers._register_factory('Key_path', Key.from_path) +_helpers._FACTORIES.register('Key', Key) +_helpers._FACTORIES.register('Key_from_protobuf', Key.from_protobuf) +_helpers._FACTORIES.register('Key_from_path', Key.from_path) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 5d5c80babf30..40619f116db0 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -189,10 +189,10 @@ def ancestor(self, ancestor): # If a list was provided, turn it into a Key. if isinstance(ancestor, list): - ancestor = _helpers._invoke_factory('Key_path', *ancestor) + ancestor = _helpers._FACTORIES.invoke('Key_from_path', *ancestor) # If we don't have a Key value by now, something is wrong. 
- if not isinstance(ancestor, _helpers._get_factory('Key')): + if not isinstance(ancestor, _helpers._FACTORIES.get('Key')): raise TypeError('Expected list or Key, got %s.' % type(ancestor)) # Get the composite filter and add a new property filter. @@ -322,8 +322,8 @@ def fetch(self, limit=None): entity_pbs, end_cursor = query_results[:2] self._cursor = end_cursor - return [_helpers._invoke_factory('Entity_pb', entity, - dataset=self.dataset()) + return [_helpers._FACTORIES.invoke('Entity_pb', entity, + dataset=self.dataset()) for entity in entity_pbs] def cursor(self): @@ -393,4 +393,4 @@ def order(self, *properties): return clone -_helpers._register_factory('Query', Query) +_helpers._FACTORIES.register('Query', Query) diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index d799fa4ad035..cde28b64e171 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -1,47 +1,40 @@ import unittest2 -class _FactoryBase(object): +class Test_FactoryRegistry(unittest2.TestCase): def setUp(self): - from gcloud.datastore._helpers import _FACTORIES - - self._before = _FACTORIES.copy() + self._widget = object() + self._called_with = [] - def tearDown(self): + def _makeOne(self): from gcloud.datastore._helpers import _FACTORIES + return type(_FACTORIES)() - _FACTORIES.clear() - _FACTORIES.update(self._before) - - -class Test__register_factory(_FactoryBase, unittest2.TestCase): - - def _callFUT(self, name, factory): - from gcloud.datastore._helpers import _register_factory - - return _register_factory(name, factory) + def _factory(self, *args, **kw): # pragma: NO COVER + self._called_with.append((args, kw)) + return self._widget - def test_it(self): - from gcloud.datastore._helpers import _FACTORIES + def test_register(self): class _Foo(object): pass - self._callFUT('Foo', _Foo) - self.assertTrue(_FACTORIES['Foo'] is _Foo) + factories = self._makeOne() + factories.register('Foo', _Foo) + self.assertTrue(factories.get('Foo') is _Foo) - def test_duplicate_exact(self): - from gcloud.datastore._helpers import _FACTORIES + def test_register_duplicate_exact(self): class _Foo(object): pass - self._callFUT('Foo', _Foo) - self._callFUT('Foo', _Foo) - self.assertTrue(_FACTORIES['Foo'] is _Foo) + factories = self._makeOne() + factories.register('Foo', _Foo) + factories.register('Foo', _Foo) + self.assertTrue(factories.get('Foo') is _Foo) - def test_duplicate_conflict(self): + def test_register_duplicate_conflict(self): from gcloud.datastore._helpers import DuplicateFactory class _Foo(object): @@ -50,70 +43,48 @@ class _Foo(object): class _Bar(object): pass - self._callFUT('Foo', _Foo) - self.assertRaises(DuplicateFactory, self._callFUT, 'Foo', _Bar) - - -class Test__get_factory(_FactoryBase, unittest2.TestCase): - - def setUp(self): - from gcloud.datastore._helpers import _FACTORIES - - super(Test__get_factory, self).setUp() - _FACTORIES['Widget'] = self._factory - - def _callFUT(self, name, *args, **kw): - from gcloud.datastore._helpers import _get_factory - - return _get_factory(name, *args, **kw) - - @staticmethod - def _factory(*args, **kw): # pragma: NO COVER - pass + factories = self._makeOne() + factories.register('Foo', _Foo) + self.assertRaises(DuplicateFactory, factories.register, 'Foo', _Bar) - def test_miss(self): + def test_get_miss(self): from gcloud.datastore._helpers import InvalidFactory - self.assertRaises(InvalidFactory, self._callFUT, 'Nonesuch') + factories = self._makeOne() + self.assertRaises(InvalidFactory, factories.get, 
-    def test_hit(self):
-        self.assertTrue(self._callFUT('Widget') is self._factory)
+    def test_get_hit(self):
+        factories = self._makeOne()
 
+        # Use a bare function to avoid method wrappers for testing.
+        def _bare_factory():  # pragma: NO COVER
+            pass
 
-class Test__invoke_factory(_FactoryBase, unittest2.TestCase):
-
-    def setUp(self):
-        from gcloud.datastore._helpers import _FACTORIES
-
-        super(Test__invoke_factory, self).setUp()
-        self._called_with = []
-        self._widget = object()
-        _FACTORIES['Widget'] = self._factory
-
-    def _callFUT(self, name, *args, **kw):
-        from gcloud.datastore._helpers import _invoke_factory
-
-        return _invoke_factory(name, *args, **kw)
-
-    def _factory(self, *args, **kw):
-        self._called_with.append((args, kw))
-        return self._widget
+        factories.register('Widget', _bare_factory)
+        self.assertTrue(factories.get('Widget') is _bare_factory)
 
-    def test_missing_registration(self):
+    def test_invoke_miss(self):
         from gcloud.datastore._helpers import InvalidFactory
 
-        self.assertRaises(InvalidFactory, self._callFUT, 'Nonesuch')
+        factories = self._makeOne()
+        self.assertRaises(InvalidFactory, factories.invoke, 'Nonesuch')
 
-    def test_wo_args_or_kw(self):
-        self.assertTrue(self._callFUT('Widget') is self._widget)
+    def test_invoke_wo_args_or_kw(self):
+        factories = self._makeOne()
+        factories.register('Widget', self._factory)
+        self.assertTrue(factories.invoke('Widget') is self._widget)
         self.assertEqual(self._called_with, [((), {})])
 
-    def test_w_args(self):
-        self.assertTrue(self._callFUT('Widget', 'foo', 42) is self._widget)
+    def test_invoke_w_args(self):
+        factories = self._makeOne()
+        factories.register('Widget', self._factory)
+        self.assertTrue(factories.invoke('Widget', 'foo', 42) is self._widget)
         self.assertEqual(self._called_with, [(('foo', 42), {})])
 
-    def test_w_kw(self):
-        self.assertTrue(self._callFUT('Widget', foo=42) is self._widget)
+    def test_invoke_w_kw(self):
+        factories = self._makeOne()
+        factories.register('Widget', self._factory)
+        self.assertTrue(factories.invoke('Widget', foo=42) is self._widget)
         self.assertEqual(self._called_with, [((), {'foo': 42})])
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index ce766e21e16c..f5425ea49c4a 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -237,7 +237,7 @@ def commit(self):
             key_pb = result.insert_auto_id_key[i]
             # We can ignore the 'dataset' parameter because we are only
             # creating a transient instance in order to copy its path.
-            key = _helpers._invoke_factory('Key_pb', key_pb)
+            key = _helpers._FACTORIES.invoke('Key_from_protobuf', key_pb)
             entity.key(entity.key().path(key.path()))
 
         # Tell the connection that the transaction is over.
@@ -257,4 +257,4 @@ def __exit__(self, exc_type, exc_val, exc_tb):
             self.rollback()
 
 
-_helpers._register_factory('Transaction', Transaction)
+_helpers._FACTORIES.register('Transaction', Transaction)

From 3cc965c4d0e54b38edc50e658bc0784da45e4498 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 21 Oct 2014 13:04:46 -0400
Subject: [PATCH 10/11] Rename alternate factories for clarity.
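
'Entity_pb' becomes 'Entity_from_protobuf', matching the 'Key_from_protobuf'
and 'Key_from_path' names already in use.  As a sketch only (assuming an
entity protobuf 'entity_pb' and a 'dataset' already in scope), call sites
now read:

    entity = _helpers._FACTORIES.invoke(
        'Entity_from_protobuf', entity_pb, dataset=dataset)

rather than the terser:

    entity = _helpers._FACTORIES.invoke('Entity_pb', entity_pb, dataset=dataset)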
---
 gcloud/datastore/_helpers.py | 2 +-
 gcloud/datastore/dataset.py  | 2 +-
 gcloud/datastore/entity.py   | 2 +-
 gcloud/datastore/query.py    | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py
index 16bb552177f5..b1043ea16a4b 100644
--- a/gcloud/datastore/_helpers.py
+++ b/gcloud/datastore/_helpers.py
@@ -166,7 +166,7 @@ def _get_value_from_value_pb(value_pb):
         result = value_pb.blob_value
 
     elif value_pb.HasField('entity_value'):
-        result = _FACTORIES.get('Entity_pb')(value_pb.entity_value)
+        result = _FACTORIES.get('Entity_from_protobuf')(value_pb.entity_value)
 
     elif value_pb.list_value:
         result = [_get_value_from_value_pb(x) for x in value_pb.list_value]
diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py
index 677a50ac560e..f3ee10185fd0 100644
--- a/gcloud/datastore/dataset.py
+++ b/gcloud/datastore/dataset.py
@@ -105,7 +105,7 @@ def get_entities(self, keys):
         entities = []
         for entity_pb in entity_pbs:
             entities.append(_helpers._FACTORIES.invoke(
-                'Entity_pb', entity_pb, dataset=self))
+                'Entity_from_protobuf', entity_pb, dataset=self))
         return entities
 
 
diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py
index b610fbbb575d..a457304d0ac8 100644
--- a/gcloud/datastore/entity.py
+++ b/gcloud/datastore/entity.py
@@ -270,4 +270,4 @@ def __repr__(self):
 
 
 _helpers._FACTORIES.register('Entity', Entity)
-_helpers._FACTORIES.register('Entity_pb', Entity.from_protobuf)
+_helpers._FACTORIES.register('Entity_from_protobuf', Entity.from_protobuf)
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py
index 40619f116db0..e550f1aa36e6 100644
--- a/gcloud/datastore/query.py
+++ b/gcloud/datastore/query.py
@@ -322,7 +322,7 @@ def fetch(self, limit=None):
         entity_pbs, end_cursor = query_results[:2]
         self._cursor = end_cursor
-        return [_helpers._FACTORIES.invoke('Entity_pb', entity,
+        return [_helpers._FACTORIES.invoke('Entity_from_protobuf', entity,
                                            dataset=self.dataset())
                 for entity in entity_pbs]

From d539b1736a8ca414464b3f6f50fa6c687e7f551b Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 21 Oct 2014 13:10:59 -0400
Subject: [PATCH 11/11] Use 'invoke' rather than 'get' followed by call to invoke factories.

---
 gcloud/datastore/_helpers.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py
index b1043ea16a4b..453290e9b063 100644
--- a/gcloud/datastore/_helpers.py
+++ b/gcloud/datastore/_helpers.py
@@ -148,7 +148,7 @@ def _get_value_from_value_pb(value_pb):
         result = naive.replace(tzinfo=pytz.utc)
 
     elif value_pb.HasField('key_value'):
-        result = _FACTORIES.get('Key_from_protobuf')(value_pb.key_value)
+        result = _FACTORIES.invoke('Key_from_protobuf', value_pb.key_value)
 
     elif value_pb.HasField('boolean_value'):
         result = value_pb.boolean_value
@@ -166,7 +166,8 @@ def _get_value_from_value_pb(value_pb):
         result = value_pb.blob_value
 
     elif value_pb.HasField('entity_value'):
-        result = _FACTORIES.get('Entity_from_protobuf')(value_pb.entity_value)
+        result = _FACTORIES.invoke(
+            'Entity_from_protobuf', value_pb.entity_value)
 
     elif value_pb.list_value:
         result = [_get_value_from_value_pb(x) for x in value_pb.list_value]
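
The registry object itself is not visible in these patches.  A minimal sketch
of what _FACTORIES in gcloud/datastore/_helpers.py could look like, consistent
with the tests in test__helpers.py above (the class name _FactoryRegistry and
the exact implementation are assumptions, not the actual module), is:

    class DuplicateFactory(Exception):
        """A different factory was registered under an already-used name."""

    class InvalidFactory(Exception):
        """A factory was requested under a name that was never registered."""

    class _FactoryRegistry(object):
        """Map names to factory callables (the class name here is assumed)."""

        def __init__(self):
            self._factories = {}

        def register(self, name, factory):
            # Re-registering the identical factory is allowed; registering a
            # conflicting one raises DuplicateFactory.
            existing = self._factories.get(name)
            if existing is not None and existing is not factory:
                raise DuplicateFactory(name)
            self._factories[name] = factory

        def get(self, name):
            # Look up a factory, raising InvalidFactory for unknown names.
            try:
                return self._factories[name]
            except KeyError:
                raise InvalidFactory(name)

        def invoke(self, name, *args, **kwargs):
            # Look up and call in one step:  this is why the final patch
            # replaces "_FACTORIES.get(name)(...)" call sites with
            # "_FACTORIES.invoke(name, ...)".
            return self.get(name)(*args, **kwargs)

    _FACTORIES = _FactoryRegistry()

With such a registry, _FACTORIES.invoke('Key_from_protobuf', value_pb.key_value)
is equivalent to _FACTORIES.get('Key_from_protobuf')(value_pb.key_value), but
reads as a single operation.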