Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions docs/datastore-batches.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
Batches
~~~~~~~

.. automodule:: gcloud.datastore.batch
:members:
:undoc-members:
:show-inheritance:
3 changes: 2 additions & 1 deletion docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@
datastore-api
datastore-entities
datastore-keys
datastore-transactions
datastore-queries
datastore-transactions
datastore-batches
storage-api
storage-buckets
storage-keys
Expand Down
169 changes: 169 additions & 0 deletions gcloud/datastore/batch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Create / interact with a batch of updates / deletes."""

from gcloud.datastore import _implicit_environ
from gcloud.datastore import datastore_v1_pb2 as datastore_pb


class Batch(object):
    """An abstraction representing a collected group of updates / deletes.

    Used to build up a bulk mutation.

    For example, the following snippet of code will put the two ``save``
    operations and the delete operation into the same mutation, and send
    them to the server in a single API request::

      >>> from gcloud import datastore
      >>> batch = Batch()
      >>> batch.put(entity1)
      >>> batch.put(entity2)
      >>> batch.delete(key3)
      >>> batch.commit()

    You can also use a batch as a context manager, in which case the
    ``commit`` will be called automatically if its block exits without
    raising an exception::

      >>> with Batch() as batch:
      ...     batch.put(entity1)
      ...     batch.put(entity2)
      ...     batch.delete(key3)

    By default, no updates will be sent if the block exits with an error::

      >>> from gcloud import datastore
      >>> dataset = datastore.get_dataset('dataset-id')
      >>> with Batch() as batch:
      ...     do_some_work(batch)
      ...     raise Exception()  # rolls back
    """

    def __init__(self, dataset_id=None, connection=None):
        """Construct a batch.

        :type dataset_id: :class:`str`.
        :param dataset_id: The ID of the dataset.

        :type connection: :class:`gcloud.datastore.connection.Connection`
        :param connection: The connection used to connect to datastore.

        :raises: :class:`ValueError` if either a connection or dataset ID
                 are not set.
        """
        # Fall back to the implicit environment for anything not passed
        # explicitly.
        self._connection = connection or _implicit_environ.CONNECTION
        self._dataset_id = dataset_id or _implicit_environ.DATASET_ID

        if self._connection is None or self._dataset_id is None:
            raise ValueError('A batch must have a connection and '
                             'a dataset ID set.')

        # All work accumulated by this batch goes into a single Mutation
        # protobuf, sent to the server on commit().
        self._mutation = datastore_pb.Mutation()

    @property
    def dataset_id(self):
        """Getter for dataset ID in which the batch will run.

        :rtype: :class:`str`
        :returns: The dataset ID in which the batch will run.
        """
        return self._dataset_id

    @property
    def connection(self):
        """Getter for connection over which the batch will run.

        :rtype: :class:`gcloud.datastore.connection.Connection`
        :returns: The connection over which the batch will run.
        """
        return self._connection

    @property
    def mutation(self):
        """Getter for the current mutation.

        Every batch is committed with a single Mutation
        representing the 'work' to be done as part of the batch.
        Inside a batch, calling ``batch.put()`` with an entity, or
        ``batch.delete`` with a key, builds up the mutation.
        This getter returns the Mutation protobuf that
        has been built-up so far.

        :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`
        :returns: The Mutation protobuf to be sent in the commit request.
        """
        return self._mutation

    def put(self, entity):
        """Remember an entity's state to be saved during ``commit``.

        .. note::
           Any existing properties for the entity will be replaced by those
           currently set on this instance. Already-stored properties which do
           not correspond to keys set on this instance will be removed from
           the datastore.

        .. note::
           Property values which are "text" ('unicode' in Python2, 'str' in
           Python3) map to 'string_value' in the datastore; values which are
           "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

        :type entity: :class:`gcloud.datastore.entity.Entity`
        :param entity: the entity to be saved.

        :raises: ValueError if entity has no key assigned.
        """
        if entity.key is None:
            raise ValueError("Entity must have a key")

        key_pb = entity.key.to_protobuf()
        properties = dict(entity)
        exclude = tuple(entity.exclude_from_indexes)

        # Passing our mutation means the connection accumulates the save
        # into this batch instead of committing it immediately.
        self.connection.save_entity(
            self.dataset_id, key_pb, properties,
            exclude_from_indexes=exclude, mutation=self.mutation)

    def delete(self, key):
        """Remember a key to be deleted during ``commit``.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: the key to be deleted.

        :raises: ValueError if key is not complete.
        """
        if key.is_partial:
            raise ValueError("Key must be complete")

        key_pb = key.to_protobuf()
        self.connection.delete_entities(
            self.dataset_id, [key_pb], mutation=self.mutation)

    def commit(self):
        """Commits the batch.

        This is called automatically upon exiting a with statement,
        however it can be called explicitly if you don't want to use a
        context manager.
        """
        self.connection.commit(self._dataset_id, self.mutation)

    def __enter__(self):
        """Enter the batch's context: return the batch itself."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Commit only on a clean exit; on error, send nothing.

        Returns ``None`` (falsy), so any exception propagates.
        """
        if exc_type is None:
            self.commit()
34 changes: 28 additions & 6 deletions gcloud/datastore/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -421,7 +421,7 @@ def allocate_ids(self, dataset_id, key_pbs):
return list(response.key)

def save_entity(self, dataset_id, key_pb, properties,

This comment was marked as spam.

This comment was marked as spam.

This comment was marked as spam.

This comment was marked as spam.

This comment was marked as spam.

exclude_from_indexes=()):
exclude_from_indexes=(), mutation=None):
"""Save an entity to the Cloud Datastore with the provided properties.

.. note::
Expand All @@ -441,13 +441,24 @@ def save_entity(self, dataset_id, key_pb, properties,
:type exclude_from_indexes: sequence of string
:param exclude_from_indexes: Names of properties *not* to be indexed.

:type mutation: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`
or None.
:param mutation: If passed, the mutation protobuf into which the
entity will be saved. If None, use the result
of calling ``self.mutation()``

:rtype: tuple
:returns: The pair (``assigned``, ``new_id``) where ``assigned`` is a
boolean indicating if a new ID has been assigned and
``new_id`` is either ``None`` or an integer that has been
assigned.
"""
mutation = self.mutation()
if mutation is not None:
in_batch = True
else:
in_batch = False
mutation = self.mutation()

key_pb = helpers._prepare_key_for_request(key_pb)

# If the Key is complete, we should upsert
Expand Down Expand Up @@ -479,7 +490,7 @@ def save_entity(self, dataset_id, key_pb, properties,

# If this is in a transaction, we should just return True. The
# transaction will handle assigning any keys as necessary.
if self.transaction():
if in_batch or self.transaction():
return False, None

result = self.commit(dataset_id, mutation)
Expand All @@ -493,7 +504,7 @@ def save_entity(self, dataset_id, key_pb, properties,

return False, None

def delete_entities(self, dataset_id, key_pbs):
def delete_entities(self, dataset_id, key_pbs, mutation=None):

This comment was marked as spam.

"""Delete keys from a dataset in the Cloud Datastore.

This method deals only with
Expand All @@ -508,13 +519,24 @@ def delete_entities(self, dataset_id, key_pbs):
:type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param key_pbs: The keys to delete from the datastore.

:type mutation: :class:`gcloud.datastore.datastore_v1_pb2.Mutation`
or None.
:param mutation: If passed, the mutation protobuf into which the
deletion will be saved. If None, use the result
of calling ``self.mutation()``

:rtype: boolean
:returns: ``True``
"""
mutation = self.mutation()
if mutation is not None:
in_batch = True
else:
in_batch = False
mutation = self.mutation()

helpers._add_keys_to_request(mutation.delete, key_pbs)

if not self.transaction():
if not in_batch and not self.transaction():
self.commit(dataset_id, mutation)

return True
Expand Down
Loading