86 changes: 49 additions & 37 deletions gcloud/datastore/query.py
@@ -19,11 +19,10 @@
from gcloud.datastore import _implicit_environ
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers
from gcloud.datastore.dataset import Dataset
from gcloud.datastore.key import Key


class Query(_implicit_environ._DatastoreBase):
class Query(object):
"""A Query against the Cloud Datastore.

This class serves as an abstraction for creating a query over data
@@ -32,8 +31,9 @@ class Query(_implicit_environ._DatastoreBase):
:type kind: string.
:param kind: The kind to query.

:type dataset: :class:`gcloud.datastore.dataset.Dataset`.
:param dataset: The dataset to query.
:type dataset_id: str
:param dataset_id: The ID of the dataset to query. If not passed,
uses the implicit default.

:type namespace: string or None.
:param namespace: The namespace to which to restrict results.
@@ -54,6 +54,9 @@ class Query(_implicit_environ._DatastoreBase):

:type group_by: sequence_of_string.
:param group_by: field names used to group query results.

:raises: ValueError if ``dataset_id`` is not passed and no implicit
default is set.
"""

OPERATORS = {
@@ -66,15 +69,22 @@ class Query(_implicit_environ._DatastoreBase):
"""Mapping of operator strings and their protobuf equivalents."""

def __init__(self,
dataset_id=None,
kind=None,
dataset=None,
namespace=None,
ancestor=None,
filters=(),
projection=(),
order=(),
group_by=()):
super(Query, self).__init__(dataset=dataset)

if dataset_id is None:
dataset_id = _implicit_environ.DATASET_ID

if dataset_id is None:
raise ValueError("No dataset ID supplied, and no default set.")

self._dataset_id = dataset_id
self._kind = kind
self._namespace = namespace
self._ancestor = ancestor
@@ -84,26 +94,12 @@ def __init__(self,
self._group_by = list(group_by)

@property
def dataset(self):
"""Get the dataset for this Query.

The dataset against which the Query will be run.

:rtype: :class:`gcloud.datastore.dataset.Dataset` or None,
:returns: the current dataset.
"""
return self._dataset

@dataset.setter
def dataset(self, value):
"""Set the dataset for the query
def dataset_id(self):
"""Get the dataset ID for this Query.

:type value: class:`gcloud.datastore.dataset.Dataset`
:param value: the new dataset
:rtype: str
"""
if not isinstance(value, Dataset):
raise ValueError("Dataset must be a Dataset")
self._dataset = value
return self._dataset_id

@property
def namespace(self):
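A minimal sketch of the new dataset_id handling above; the dataset ID string and the direct assignment to _implicit_environ.DATASET_ID are placeholders for illustration, not part of this diff:

>>> from gcloud.datastore import _implicit_environ
>>> from gcloud.datastore.query import Query
>>> query = Query(dataset_id='my-dataset-id', kind='Person')  # explicit ID
>>> query.dataset_id
'my-dataset-id'
>>> _implicit_environ.DATASET_ID = 'my-dataset-id'  # implicit default set elsewhere
>>> Query(kind='Person').dataset_id  # falls back to the default
'my-dataset-id'
>>> _implicit_environ.DATASET_ID = None
>>> Query(kind='Person')  # neither an explicit ID nor a default
Traceback (most recent call last):
  ...
ValueError: No dataset ID supplied, and no default set.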
@@ -294,34 +290,49 @@ def group_by(self, value):
value = [value]
self._group_by[:] = value

def fetch(self, limit=0, offset=0, start_cursor=None, end_cursor=None):
def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
connection=None):
"""Execute the Query; return an iterator for the matching entities.

For example::

>>> from gcloud import datastore
>>> dataset = datastore.get_dataset('dataset-id')
>>> query = dataset.query('Person').filter('name', '=', 'Sally')
>>> from gcloud.datastore.query import Query
>>> query = Query('dataset-id', 'Person')
>>> query.add_filter('name', '=', 'Sally')
>>> list(query.fetch())
[<Entity object>, <Entity object>, ...]
>>> list(query.fetch(1))
[<Entity object>]

:type limit: integer
:type limit: integer or None
:param limit: An optional limit passed through to the iterator.

:type limit: offset
:param limit: An optional offset passed through to the iterator.
:type offset: integer
:param offset: An optional offset passed through to the iterator.

:type start_cursor: offset
:type start_cursor: bytes
:param start_cursor: An optional cursor passed through to the iterator.

:type end_cursor: offset
:type end_cursor: bytes
:param end_cursor: An optional cursor passed through to the iterator.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: An optional connection passed through to the iterator.
If not supplied, uses the implicit default.


:rtype: :class:`Iterator`
:raises: ValueError if ``connection`` is not passed and no implicit
default has been set.
"""
return Iterator(self, limit, offset, start_cursor, end_cursor)
if connection is None:
connection = _implicit_environ.CONNECTION

if connection is None:
raise ValueError("No connection passed, and no default set")

return Iterator(
self, connection, limit, offset, start_cursor, end_cursor)


class Iterator(object):
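A sketch of the new fetch() signature with an explicit connection; the Connection construction below is assumed (its arguments are not shown in this diff), and the dataset ID is a placeholder:

>>> from gcloud.datastore.connection import Connection
>>> from gcloud.datastore.query import Query
>>> connection = Connection()  # assumed constructor call; see connection.py for the real arguments
>>> query = Query('my-dataset-id', 'Person')
>>> query.add_filter('name', '=', 'Sally')
>>> list(query.fetch(limit=1, connection=connection))
[<Entity object>]
>>> list(query.fetch(limit=1))  # no connection and no implicit default
Traceback (most recent call last):
  ...
ValueError: No connection passed, and no default set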
@@ -334,9 +345,10 @@ class Iterator(object):
datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
)

def __init__(self, query, limit=None, offset=0,
def __init__(self, query, connection, limit=None, offset=0,
start_cursor=None, end_cursor=None):
self._query = query
self._connection = connection
self._limit = limit
self._offset = offset
self._start_cursor = start_cursor
Expand Down Expand Up @@ -366,9 +378,9 @@ def next_page(self):

pb.offset = self._offset

query_results = self._query.dataset.connection().run_query(
query_results = self._connection.run_query(
query_pb=pb,
dataset_id=self._query.dataset.id(),
dataset_id=self._query.dataset_id,
namespace=self._query.namespace,
)
# NOTE: `query_results` contains an extra value that we don't use,
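Since the Iterator now takes the connection directly (next_page() above uses self._connection and self._query.dataset_id instead of reaching through a Dataset), it can also be driven without fetch(); a sketch reusing the placeholder query and connection objects from the earlier examples:

>>> from gcloud.datastore.query import Iterator
>>> iterator = Iterator(query, connection, limit=10)
>>> entities = list(iterator)  # materialize the matching entities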