From 2a245e64c36dc9baec4c7583874dbbc015ff7176 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Thu, 27 Apr 2017 17:38:31 +0100 Subject: [PATCH 01/12] Update rows in bulk: initial version --- bigtable/google/cloud/bigtable/row.py | 7 ++++ bigtable/google/cloud/bigtable/table.py | 47 +++++++++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/bigtable/google/cloud/bigtable/row.py b/bigtable/google/cloud/bigtable/row.py index 5e9075ef8eec..997d666f4209 100644 --- a/bigtable/google/cloud/bigtable/row.py +++ b/bigtable/google/cloud/bigtable/row.py @@ -55,6 +55,13 @@ def __init__(self, row_key, table): self._row_key = _to_bytes(row_key) self._table = table + @property + def row_key(self): + return self._row_key + + @property + def table(self): + return self._table class _SetDeleteRow(Row): """Row helper for setting or deleting cell values. diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 3fbd198d6b65..9f170e05828c 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -29,6 +29,10 @@ from google.cloud.bigtable.row_data import PartialRowsData +class RowBelongingError(Exception): + """Row from another table.""" + + class Table(object): """Representation of a Google Cloud Bigtable Table. @@ -276,6 +280,25 @@ def read_rows(self, start_key=None, end_key=None, limit=None, # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse` return PartialRowsData(response_iterator) + def mutate_rows(self, rows): + + _check_rows(self.name, rows) + mutate_rows_request = _mutate_rows_request(self.name, rows) + print(mutate_rows_request) + + unsuccessfully_mutated_rows = [] + client = self._instance._client + responses = client._data_stub.MutateRows(mutate_rows_request) + for response in responses: + for entry in response.entries: + if not entry.status.code: + rows[entry.index].clear() + else: + unsuccessfully_mutated_rows.append( + (entry, rows[entry.index])) + + return unsuccessfully_mutated_rows + def sample_row_keys(self): """Read a sample of row keys in the table. @@ -373,3 +396,27 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, message.rows.row_ranges.add(**range_kwargs) return message + + +def _mutate_rows_request(table_name, rows): + request_pb = data_messages_v2_pb2.MutateRowsRequest(table_name=table_name) + + for row in rows: + entry = request_pb.entries.add() + entry.row_key = row.row_key + for mutation in row._get_mutations(None): + entry.mutations.add().CopyFrom(mutation) + + return request_pb + + +def _check_rows(table_name, rows): + """Checks that all rows belong to the table.""" + for row in rows: + if not isinstance(row, DirectRow): + raise TypeError("Bulk processing can not be applied for conditional" + "or append mutations.") + if row.table.name != table_name: + raise RowBelongingError( + "Row %s is a part of %s table. 
Current table: %s" % + (row.row_key, row.table.name, table_name)) From e194f7ce4cdc00053f01df1320a4bbedc04acd20 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Thu, 11 May 2017 18:21:16 +0100 Subject: [PATCH 02/12] Update rows in bulk --- bigtable/google/cloud/bigtable/table.py | 53 ++++++++++++++++++++----- bigtable/tests/unit/test_row.py | 20 ++++++++++ bigtable/tests/unit/test_table.py | 52 ++++++++++++++++++++++++ 3 files changed, 115 insertions(+), 10 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 9f170e05828c..4063b346d946 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -33,6 +33,10 @@ class RowBelongingError(Exception): """Row from another table.""" +class TooManyMutationsError(Exception): + """The number of mutations for bulk request is too big.""" + + class Table(object): """Representation of a Google Cloud Bigtable Table. @@ -281,10 +285,17 @@ def read_rows(self, start_key=None, end_key=None, limit=None, return PartialRowsData(response_iterator) def mutate_rows(self, rows): - + """Mutates multiple rows in bulk. + + :type rows: list + :param rows: List or other iterable of :class:`.DirectRow` instances. + + :rtype: list + :returns: A list of tuples (``MutateRowsResponse.Entry`` protobuf + corresponding to the errors, :class:`.DirectRow`) + """ _check_rows(self.name, rows) mutate_rows_request = _mutate_rows_request(self.name, rows) - print(mutate_rows_request) unsuccessfully_mutated_rows = [] client = self._instance._client @@ -296,7 +307,6 @@ def mutate_rows(self, rows): else: unsuccessfully_mutated_rows.append( (entry, rows[entry.index])) - return unsuccessfully_mutated_rows def sample_row_keys(self): @@ -399,24 +409,47 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, def _mutate_rows_request(table_name, rows): - request_pb = data_messages_v2_pb2.MutateRowsRequest(table_name=table_name) + """Creates a request to read rows in a table. + + :type table_name: str + :param table_name: The name of the table to read from. + :type rows: list + :param rows: List or other iterable of :class:`.DirectRow` instances. + + :rtype: :class:`data_messages_v2_pb2.MutateRowsRequest` + :returns: The ``MutateRowsRequest`` protobuf corresponding to the inputs. + :raises: :class:`TooManyMutationsError ` + if the number of mutations is grater than 100.000 + """ + request_pb = data_messages_v2_pb2.MutateRowsRequest(table_name=table_name) + mutations_count = 0 for row in rows: entry = request_pb.entries.add() entry.row_key = row.row_key for mutation in row._get_mutations(None): + mutations_count += 1 entry.mutations.add().CopyFrom(mutation) - + if mutations_count > 100000: + raise TooManyMutationsError('Maximum number of the entries mutations ' + 'is 100000') return request_pb def _check_rows(table_name, rows): - """Checks that all rows belong to the table.""" + """Checks that all rows belong to the table. + + :type table_name: str + :param table_name: The name of the table to read from. + + :type rows: list + :param rows: List or other iterable of :class:`.DirectRow` instances. + """ for row in rows: if not isinstance(row, DirectRow): - raise TypeError("Bulk processing can not be applied for conditional" - "or append mutations.") + raise TypeError('Bulk processing can not be applied for ' + 'conditional or append mutations.') if row.table.name != table_name: raise RowBelongingError( - "Row %s is a part of %s table. 
Current table: %s" % - (row.row_key, row.table.name, table_name)) + 'Row %s is a part of %s table. Current table: %s' % + (row.row_key, row.table.name, table_name)) \ No newline at end of file diff --git a/bigtable/tests/unit/test_row.py b/bigtable/tests/unit/test_row.py index 3e2d4fd60e0f..156a517b351a 100644 --- a/bigtable/tests/unit/test_row.py +++ b/bigtable/tests/unit/test_row.py @@ -16,6 +16,26 @@ import unittest +class TestRow(unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.bigtable.row import Row + + return Row + + def _make_one(self, *args, **kwargs): + return self._get_target_class()(*args, **kwargs) + + def test_row_key_getter(self): + row = self._make_one(row_key=b'row_key', table='table') + self.assertEqual(b'row_key', row.row_key) + + def test_row_table_getter(self): + row = self._make_one(row_key=b'row_key', table='table') + self.assertEqual('table', row.table) + + class Test_SetDeleteRow(unittest.TestCase): @staticmethod diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 63844f5d48b7..dfbacf955f36 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -16,6 +16,53 @@ import unittest +class Test___mutate_rows_request(unittest.TestCase): + + def _call_fut(self, table_name, rows): + from google.cloud.bigtable.table import _mutate_rows_request + + return _mutate_rows_request(table_name, rows) + + def test__mutate_rows_too_many_mutations(self): + from google.cloud.bigtable.table import TooManyMutationsError + from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable._generated.data_pb2 import Mutation + + mutation = Mutation() + rows = [DirectRow(row_key=b'row_key', table='table'), + DirectRow(row_key=b'row_key_2', table='table')] + rows[0]._pb_mutations = [mutation for _ in range(0, 50000)] + rows[1]._pb_mutations = [mutation for _ in range(0, 50001)] + with self.assertRaises(TooManyMutationsError): + self._call_fut('table', rows) + + +class Test__check_rows(unittest.TestCase): + + def _call_fut(self, table_name, rows): + from google.cloud.bigtable.table import _check_rows + + return _check_rows(table_name, rows) + + def test__check_rows_wrong_row_type(self): + from google.cloud.bigtable.row import ConditionalRow + + rows = [ConditionalRow(row_key=b'row_key', table='table', filter_=None)] + with self.assertRaises(TypeError): + self._call_fut('table', rows) + + def test__check_rows_wrong_table_name(self): + from collections import namedtuple + from google.cloud.bigtable.table import RowBelongingError + from google.cloud.bigtable.row import DirectRow + + table = namedtuple('Table', ['name']) + table.name = 'table' + rows = [DirectRow(row_key=b'row_key', table=table)] + with self.assertRaises(RowBelongingError): + self._call_fut('other_table', rows) + + class TestTable(unittest.TestCase): PROJECT_ID = 'project-id' @@ -569,6 +616,11 @@ def _SampleRowKeysRequestPB(*args, **kw): return messages_v2_pb2.SampleRowKeysRequest(*args, **kw) +def _MutateRowsRequestPB(*args, **kw): + from google.cloud.bigtable._generated import ( + bigtable_pb2 as data_messages_v2_pb2) + + return data_messages_v2_pb2.MutateRowsRequest(*args, **kw) def _TablePB(*args, **kw): from google.cloud.bigtable._generated import ( From 6fa779623327ca5e3090d0fd284f7d6d58fa627f Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Fri, 12 May 2017 10:17:27 +0100 Subject: [PATCH 03/12] BT bulk update: first set of comments' fixes --- bigtable/google/cloud/bigtable/table.py | 15 +++++++-------- 
bigtable/tests/unit/test_table.py | 10 +++++----- 2 files changed, 12 insertions(+), 13 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 4063b346d946..3600a9a750e1 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -29,7 +29,7 @@ from google.cloud.bigtable.row_data import PartialRowsData -class RowBelongingError(Exception): +class TableMismatchError(Exception): """Row from another table.""" @@ -294,7 +294,7 @@ def mutate_rows(self, rows): :returns: A list of tuples (``MutateRowsResponse.Entry`` protobuf corresponding to the errors, :class:`.DirectRow`) """ - _check_rows(self.name, rows) + _check_rows_table_name_and_types(self.name, rows) mutate_rows_request = _mutate_rows_request(self.name, rows) unsuccessfully_mutated_rows = [] @@ -409,10 +409,10 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, def _mutate_rows_request(table_name, rows): - """Creates a request to read rows in a table. + """Creates a request to mutate rows in a table. :type table_name: str - :param table_name: The name of the table to read from. + :param table_name: The name of the table to write to. :type rows: list :param rows: List or other iterable of :class:`.DirectRow` instances. @@ -431,12 +431,11 @@ def _mutate_rows_request(table_name, rows): mutations_count += 1 entry.mutations.add().CopyFrom(mutation) if mutations_count > 100000: - raise TooManyMutationsError('Maximum number of the entries mutations ' - 'is 100000') + raise TooManyMutationsError('Maximum number of mutations is 100000') return request_pb -def _check_rows(table_name, rows): +def _check_rows_table_name_and_types(table_name, rows): """Checks that all rows belong to the table. :type table_name: str @@ -450,6 +449,6 @@ def _check_rows(table_name, rows): raise TypeError('Bulk processing can not be applied for ' 'conditional or append mutations.') if row.table.name != table_name: - raise RowBelongingError( + raise TableMismatchError( 'Row %s is a part of %s table. 
Current table: %s' % (row.row_key, row.table.name, table_name)) \ No newline at end of file diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index dfbacf955f36..90403c57fe82 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -37,12 +37,12 @@ def test__mutate_rows_too_many_mutations(self): self._call_fut('table', rows) -class Test__check_rows(unittest.TestCase): +class Test__check_rows_table_name_and_types(unittest.TestCase): def _call_fut(self, table_name, rows): - from google.cloud.bigtable.table import _check_rows + from google.cloud.bigtable.table import _check_rows_table_name_and_types - return _check_rows(table_name, rows) + return _check_rows_table_name_and_types(table_name, rows) def test__check_rows_wrong_row_type(self): from google.cloud.bigtable.row import ConditionalRow @@ -53,13 +53,13 @@ def test__check_rows_wrong_row_type(self): def test__check_rows_wrong_table_name(self): from collections import namedtuple - from google.cloud.bigtable.table import RowBelongingError + from google.cloud.bigtable.table import TableMismatchError from google.cloud.bigtable.row import DirectRow table = namedtuple('Table', ['name']) table.name = 'table' rows = [DirectRow(row_key=b'row_key', table=table)] - with self.assertRaises(RowBelongingError): + with self.assertRaises(TableMismatchError): self._call_fut('other_table', rows) From 2382f0313f0512b93462f84bc6608d99e692eec3 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Sun, 21 May 2017 13:10:35 +0100 Subject: [PATCH 04/12] Fix the second part of comments. --- bigtable/google/cloud/bigtable/table.py | 66 ++++++++++++------- bigtable/tests/unit/test_table.py | 84 +++++++++++++++++++------ 2 files changed, 108 insertions(+), 42 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 3600a9a750e1..019cddba2e4d 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -29,11 +29,14 @@ from google.cloud.bigtable.row_data import PartialRowsData -class TableMismatchError(Exception): +# Maximum number of mutations in bulk +_MAX_BULK_MUTATIONS = 100000 + +class TableMismatchError(ValueError): """Row from another table.""" -class TooManyMutationsError(Exception): +class TooManyMutationsError(ValueError): """The number of mutations for bulk request is too big.""" @@ -286,6 +289,11 @@ def read_rows(self, start_key=None, end_key=None, limit=None, def mutate_rows(self, rows): """Mutates multiple rows in bulk. + + The method tries to update all specified rows. + If some of the rows weren't updated, it would not remove mutations. They + can be applied to the row separately. + If row mutations finished successfully, they would be cleaned up. :type rows: list :param rows: List or other iterable of :class:`.DirectRow` instances. 
@@ -294,15 +302,13 @@ def mutate_rows(self, rows): :returns: A list of tuples (``MutateRowsResponse.Entry`` protobuf corresponding to the errors, :class:`.DirectRow`) """ - _check_rows_table_name_and_types(self.name, rows) mutate_rows_request = _mutate_rows_request(self.name, rows) - unsuccessfully_mutated_rows = [] client = self._instance._client responses = client._data_stub.MutateRows(mutate_rows_request) for response in responses: for entry in response.entries: - if not entry.status.code: + if entry.status.code == 0: rows[entry.index].clear() else: unsuccessfully_mutated_rows.append( @@ -419,36 +425,52 @@ def _mutate_rows_request(table_name, rows): :rtype: :class:`data_messages_v2_pb2.MutateRowsRequest` :returns: The ``MutateRowsRequest`` protobuf corresponding to the inputs. - :raises: :class:`TooManyMutationsError ` - if the number of mutations is grater than 100.000 + :raises: :exc:`~.table.TooManyMutationsError` if the number of mutations is + grater than 100,000 """ request_pb = data_messages_v2_pb2.MutateRowsRequest(table_name=table_name) mutations_count = 0 for row in rows: + _check_row_table_name(table_name, row) + _check_row_type(row) entry = request_pb.entries.add() entry.row_key = row.row_key for mutation in row._get_mutations(None): mutations_count += 1 entry.mutations.add().CopyFrom(mutation) - if mutations_count > 100000: - raise TooManyMutationsError('Maximum number of mutations is 100000') + if mutations_count > _MAX_BULK_MUTATIONS: + raise TooManyMutationsError('Maximum number of mutations is %s' % + _MAX_BULK_MUTATIONS) return request_pb -def _check_rows_table_name_and_types(table_name, rows): - """Checks that all rows belong to the table. +def _check_row_table_name(table_name, row): + """Checks that a row belong to the table. :type table_name: str - :param table_name: The name of the table to read from. + :param table_name: The name of the table. - :type rows: list - :param rows: List or other iterable of :class:`.DirectRow` instances. + :type row: :class:`.Row` + :param row: An instance of :class:`.Row` subclasses. + + :raises: :exc:`~.table.TableMismatchError` if the row does not belong to the + table. """ - for row in rows: - if not isinstance(row, DirectRow): - raise TypeError('Bulk processing can not be applied for ' - 'conditional or append mutations.') - if row.table.name != table_name: - raise TableMismatchError( - 'Row %s is a part of %s table. Current table: %s' % - (row.row_key, row.table.name, table_name)) \ No newline at end of file + if row.table.name != table_name: + raise TableMismatchError( + 'Row %s is a part of %s table. Current table: %s' % + (row.row_key, row.table.name, table_name)) + + +def _check_row_type(row): + """Checks that a row is an instance of :class:`.DirectRow`. + + :type row: :class:`.Row` + :param row: An instance of :class:`.Row` subclasses. + + :raises: :class:`TypeError ` if the row is not an + instance of DirectRow. 
+ """ + if not isinstance(row, DirectRow): + raise TypeError('Bulk processing can not be applied for ' + 'conditional or append mutations.') diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 90403c57fe82..25c2fac7251a 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -24,32 +24,30 @@ def _call_fut(self, table_name, rows): return _mutate_rows_request(table_name, rows) def test__mutate_rows_too_many_mutations(self): - from google.cloud.bigtable.table import TooManyMutationsError - from google.cloud.bigtable.row import DirectRow + from collections import namedtuple from google.cloud.bigtable._generated.data_pb2 import Mutation + from google.cloud.bigtable.row import DirectRow + import google.cloud.bigtable.table as table_module + from google.cloud.bigtable.table import TooManyMutationsError + table = namedtuple('Table', ['name']) + table.name = 'table' + table_module._MAX_BULK_MUTATIONS = 3 mutation = Mutation() - rows = [DirectRow(row_key=b'row_key', table='table'), - DirectRow(row_key=b'row_key_2', table='table')] - rows[0]._pb_mutations = [mutation for _ in range(0, 50000)] - rows[1]._pb_mutations = [mutation for _ in range(0, 50001)] + rows = [DirectRow(row_key=b'row_key', table=table), + DirectRow(row_key=b'row_key_2', table=table)] + rows[0]._pb_mutations = [mutation, mutation] + rows[1]._pb_mutations = [mutation, mutation] with self.assertRaises(TooManyMutationsError): self._call_fut('table', rows) -class Test__check_rows_table_name_and_types(unittest.TestCase): - - def _call_fut(self, table_name, rows): - from google.cloud.bigtable.table import _check_rows_table_name_and_types - - return _check_rows_table_name_and_types(table_name, rows) +class Test__check_row_table(unittest.TestCase): - def test__check_rows_wrong_row_type(self): - from google.cloud.bigtable.row import ConditionalRow + def _call_fut(self, table_name, row): + from google.cloud.bigtable.table import _check_row_table_name - rows = [ConditionalRow(row_key=b'row_key', table='table', filter_=None)] - with self.assertRaises(TypeError): - self._call_fut('table', rows) + return _check_row_table_name(table_name, row) def test__check_rows_wrong_table_name(self): from collections import namedtuple @@ -58,9 +56,23 @@ def test__check_rows_wrong_table_name(self): table = namedtuple('Table', ['name']) table.name = 'table' - rows = [DirectRow(row_key=b'row_key', table=table)] + row = DirectRow(row_key=b'row_key', table=table) with self.assertRaises(TableMismatchError): - self._call_fut('other_table', rows) + self._call_fut('other_table', row) + + +class Test__check_row_type(unittest.TestCase): + def _call_fut(self, table_name, row): + from google.cloud.bigtable.table import _check_row_type + + return _check_row_type(table_name, row) + + def test__check_rows_wrong_row_type(self): + from google.cloud.bigtable.row import ConditionalRow + + row = ConditionalRow(row_key=b'row_key', table='table', filter_=None) + with self.assertRaises(TypeError): + self._call_fut('table', row) class TestTable(unittest.TestCase): @@ -395,6 +407,37 @@ def test_read_row_still_partial(self): with self.assertRaises(ValueError): self._read_row_helper(chunks, None) + def test_mutate_rows(self): + from google.cloud.bigtable._generated.bigtable_pb2 import ( + MutateRowsResponse) + from google.cloud.bigtable.row import DirectRow + from tests.unit._testing import _FakeStub + + client = _Client() + instance = _Instance(self.INSTANCE_NAME, client=client) + table = 
self._make_one(self.TABLE_ID, instance) + + row_1 = DirectRow(row_key=b'row_key', table=table) + row_1.set_cell('cf', b'col', b'value1') + row_2 = DirectRow(row_key=b'row_key_2', table=table) + row_2.set_cell('cf', b'col', b'value2') + + response = MutateRowsResponse() + entry_1 = response.entries.add() + entry_1.status.code = 0 + entry_2 = response.entries.add() + entry_2.status.code = 1 + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub([response]) + result = table.mutate_rows([row_1, row_2]) + + self.assertIs(row_1, result[0][1]) + self.assertTrue(len(result)) + self.assertFalse(row_1._get_mutations(None)) + self.assertTrue(row_2._get_mutations(None)) + + def test_read_rows(self): from google.cloud._testing import _Monkey from tests.unit._testing import _FakeStub @@ -616,7 +659,8 @@ def _SampleRowKeysRequestPB(*args, **kw): return messages_v2_pb2.SampleRowKeysRequest(*args, **kw) -def _MutateRowsRequestPB(*args, **kw): + +def _mutate_rows_request_pb(*args, **kw): from google.cloud.bigtable._generated import ( bigtable_pb2 as data_messages_v2_pb2) From edd7d62ffff4c40eb6730d18d1103a446ef1b604 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Sun, 21 May 2017 17:42:49 +0100 Subject: [PATCH 05/12] BT bulk update: system test for mutate_rows() --- bigtable/tests/system.py | 24 ++++++++++++++++++++++++ bigtable/tests/unit/test_table.py | 6 +++--- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index faed85fdb302..159a1169a1d4 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -356,6 +356,30 @@ def _write_to_row(self, row1=None, row2=None, row3=None, row4=None): cell4 = Cell(CELL_VAL4, timestamp4) return cell1, cell2, cell3, cell4 + def test_mutate_rows(self): + row_1 = self._table.row(ROW_KEY) + row_1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1) + row_1.commit() + row_2 = self._table.row(ROW_KEY_ALT) + row_2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL2) + row_2.commit() + rows = [row_1, row_2] + self.rows_to_delete.extend(rows) + + # Change the contents + row_1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL3) + row_2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL4) + result = self._table.mutate_rows(rows) + self.assertFalse(result) + + # Check the contents + row_1_data = self._table.read_row(ROW_KEY) + self.assertEqual( + row_1_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL3) + row_2_data = self._table.read_row(ROW_KEY_ALT) + self.assertEqual( + row_2_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL4) + def test_read_large_cell_limit(self): row = self._table.row(ROW_KEY) self.rows_to_delete.append(row) diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 25c2fac7251a..465bde9e2cba 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -42,7 +42,7 @@ def test__mutate_rows_too_many_mutations(self): self._call_fut('table', rows) -class Test__check_row_table(unittest.TestCase): +class Test__check_row_table_name(unittest.TestCase): def _call_fut(self, table_name, row): from google.cloud.bigtable.table import _check_row_table_name @@ -429,10 +429,10 @@ def test_mutate_rows(self): entry_2.status.code = 1 # Patch the stub used by the API method. 
- client._data_stub = stub = _FakeStub([response]) + client._data_stub = _FakeStub([response]) result = table.mutate_rows([row_1, row_2]) - self.assertIs(row_1, result[0][1]) + self.assertIs(result[0][1], row_1) self.assertTrue(len(result)) self.assertFalse(row_1._get_mutations(None)) self.assertTrue(row_2._get_mutations(None)) From 871cc168768164b5d778ac8fca7b20c569fa53cb Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Sun, 21 May 2017 18:19:32 +0100 Subject: [PATCH 06/12] Fix some minor lint warnings. --- bigtable/google/cloud/bigtable/table.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 019cddba2e4d..2e0e839f7d49 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -289,18 +289,18 @@ def read_rows(self, start_key=None, end_key=None, limit=None, def mutate_rows(self, rows): """Mutates multiple rows in bulk. - - The method tries to update all specified rows. + + The method tries to update all specified rows. If some of the rows weren't updated, it would not remove mutations. They can be applied to the row separately. If row mutations finished successfully, they would be cleaned up. - + :type rows: list :param rows: List or other iterable of :class:`.DirectRow` instances. - + :rtype: list - :returns: A list of tuples (``MutateRowsResponse.Entry`` protobuf - corresponding to the errors, :class:`.DirectRow`) + :returns: A list of tuples (``MutateRowsResponse.Entry`` protobuf + corresponding to the errors, :class:`.DirectRow`) """ mutate_rows_request = _mutate_rows_request(self.name, rows) unsuccessfully_mutated_rows = [] @@ -422,7 +422,7 @@ def _mutate_rows_request(table_name, rows): :type rows: list :param rows: List or other iterable of :class:`.DirectRow` instances. - + :rtype: :class:`data_messages_v2_pb2.MutateRowsRequest` :returns: The ``MutateRowsRequest`` protobuf corresponding to the inputs. :raises: :exc:`~.table.TooManyMutationsError` if the number of mutations is @@ -452,7 +452,7 @@ def _check_row_table_name(table_name, row): :type row: :class:`.Row` :param row: An instance of :class:`.Row` subclasses. - + :raises: :exc:`~.table.TableMismatchError` if the row does not belong to the table. """ @@ -464,12 +464,12 @@ def _check_row_table_name(table_name, row): def _check_row_type(row): """Checks that a row is an instance of :class:`.DirectRow`. - + :type row: :class:`.Row` :param row: An instance of :class:`.Row` subclasses. - - :raises: :class:`TypeError ` if the row is not an - instance of DirectRow. + + :raises: :class:`TypeError ` if the row is not an + instance of DirectRow. """ if not isinstance(row, DirectRow): raise TypeError('Bulk processing can not be applied for ' From a2e9196a2d0efb16c713f288302e42e056bdc8db Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Sun, 21 May 2017 18:32:59 +0100 Subject: [PATCH 07/12] Fix some minor lint warnings. --- bigtable/google/cloud/bigtable/row.py | 1 + bigtable/google/cloud/bigtable/table.py | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/bigtable/google/cloud/bigtable/row.py b/bigtable/google/cloud/bigtable/row.py index 3ee00e7f428e..09d12377a49c 100644 --- a/bigtable/google/cloud/bigtable/row.py +++ b/bigtable/google/cloud/bigtable/row.py @@ -73,6 +73,7 @@ def table(self): """ return self._table + class _SetDeleteRow(Row): """Row helper for setting or deleting cell values. 
diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 2e0e839f7d49..da6ed2bab6d0 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -32,6 +32,7 @@ # Maximum number of mutations in bulk _MAX_BULK_MUTATIONS = 100000 + class TableMismatchError(ValueError): """Row from another table.""" @@ -291,8 +292,8 @@ def mutate_rows(self, rows): """Mutates multiple rows in bulk. The method tries to update all specified rows. - If some of the rows weren't updated, it would not remove mutations. They - can be applied to the row separately. + If some of the rows weren't updated, it would not remove mutations. + They can be applied to the row separately. If row mutations finished successfully, they would be cleaned up. :type rows: list @@ -453,8 +454,8 @@ def _check_row_table_name(table_name, row): :type row: :class:`.Row` :param row: An instance of :class:`.Row` subclasses. - :raises: :exc:`~.table.TableMismatchError` if the row does not belong to the - table. + :raises: :exc:`~.table.TableMismatchError` if the row does not belong to + the table. """ if row.table.name != table_name: raise TableMismatchError( From 8cbf1cd36d32fc0e00445743f8a3e25954afca38 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Fri, 26 May 2017 12:24:49 +0100 Subject: [PATCH 08/12] Return a list that is the same size as the input rows. Each element of the list contains a status of the corresponding row mutations. --- bigtable/google/cloud/bigtable/table.py | 14 ++++++-------- bigtable/tests/system.py | 6 ++++-- bigtable/tests/unit/test_table.py | 11 ++++++----- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index da6ed2bab6d0..3e76c81d6bce 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -300,21 +300,19 @@ def mutate_rows(self, rows): :param rows: List or other iterable of :class:`.DirectRow` instances. :rtype: list - :returns: A list of tuples (``MutateRowsResponse.Entry`` protobuf - corresponding to the errors, :class:`.DirectRow`) + :returns: A list of corresponding to each row statuses. """ mutate_rows_request = _mutate_rows_request(self.name, rows) - unsuccessfully_mutated_rows = [] client = self._instance._client responses = client._data_stub.MutateRows(mutate_rows_request) + + responses_statuses = [None for _ in range(len(rows))] for response in responses: for entry in response.entries: + responses_statuses[entry.index] = entry.status if entry.status.code == 0: rows[entry.index].clear() - else: - unsuccessfully_mutated_rows.append( - (entry, rows[entry.index])) - return unsuccessfully_mutated_rows + return responses_statuses def sample_row_keys(self): """Read a sample of row keys in the table. @@ -427,7 +425,7 @@ def _mutate_rows_request(table_name, rows): :rtype: :class:`data_messages_v2_pb2.MutateRowsRequest` :returns: The ``MutateRowsRequest`` protobuf corresponding to the inputs. 
:raises: :exc:`~.table.TooManyMutationsError` if the number of mutations is - grater than 100,000 + greater than 100,000 """ request_pb = data_messages_v2_pb2.MutateRowsRequest(table_name=table_name) mutations_count = 0 diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 159a1169a1d4..05a133755295 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -369,8 +369,10 @@ def test_mutate_rows(self): # Change the contents row_1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL3) row_2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL4) - result = self._table.mutate_rows(rows) - self.assertFalse(result) + statuses = self._table.mutate_rows(rows) + result = [status.code for status in statuses] + expected_result = [0, 0] + self.assertEqual(result, expected_result) # Check the contents row_1_data = self._table.read_row(ROW_KEY) diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 465bde9e2cba..be81fc9e2a8c 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -424,18 +424,19 @@ def test_mutate_rows(self): response = MutateRowsResponse() entry_1 = response.entries.add() + entry_1.index = 0 entry_1.status.code = 0 entry_2 = response.entries.add() + entry_2.index = 1 entry_2.status.code = 1 # Patch the stub used by the API method. client._data_stub = _FakeStub([response]) - result = table.mutate_rows([row_1, row_2]) + statuses = table.mutate_rows([row_1, row_2]) + result = [status.code for status in statuses] + expected_result = [0, 1] - self.assertIs(result[0][1], row_1) - self.assertTrue(len(result)) - self.assertFalse(row_1._get_mutations(None)) - self.assertTrue(row_2._get_mutations(None)) + self.assertEqual(result, expected_result) def test_read_rows(self): From 65ad0e46f4c983c301d4d44016cb4d817301a5cd Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Fri, 9 Jun 2017 16:00:42 +0100 Subject: [PATCH 09/12] Fix tests and some minor code comments --- bigtable/google/cloud/bigtable/table.py | 18 +++-- bigtable/tests/system.py | 29 ++++---- bigtable/tests/unit/test_table.py | 95 +++++++++++++++++++------ 3 files changed, 101 insertions(+), 41 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 3e76c81d6bce..d2e13fddee74 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -12,7 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""User friendly container for Google Cloud Bigtable Table.""" +"""User-friendly container for Google Cloud Bigtable Table.""" + + +import six from google.cloud._helpers import _to_bytes from google.cloud.bigtable._generated import ( @@ -300,13 +303,16 @@ def mutate_rows(self, rows): :param rows: List or other iterable of :class:`.DirectRow` instances. :rtype: list - :returns: A list of corresponding to each row statuses. + :returns: A list of response statuses (`google.rpc.status_pb2.Status`) + corresponding to success or failure of each row mutation sent. + These will be in the same order as the `rows`. 
""" mutate_rows_request = _mutate_rows_request(self.name, rows) client = self._instance._client responses = client._data_stub.MutateRows(mutate_rows_request) - responses_statuses = [None for _ in range(len(rows))] + responses_statuses = [ + None for _ in six.moves.xrange(len(mutate_rows_request.entries))] for response in responses: for entry in response.entries: responses_statuses[entry.index] = entry.status @@ -434,17 +440,19 @@ def _mutate_rows_request(table_name, rows): _check_row_type(row) entry = request_pb.entries.add() entry.row_key = row.row_key + # NOTE: Since `_check_row_type` has verified `row` is a `DirectRow`, the + # mutations have no state. for mutation in row._get_mutations(None): mutations_count += 1 entry.mutations.add().CopyFrom(mutation) if mutations_count > _MAX_BULK_MUTATIONS: raise TooManyMutationsError('Maximum number of mutations is %s' % - _MAX_BULK_MUTATIONS) + (_MAX_BULK_MUTATIONS,)) return request_pb def _check_row_table_name(table_name, row): - """Checks that a row belong to the table. + """Checks that a row belongs to a table. :type table_name: str :param table_name: The name of the table. diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 05a133755295..1fcda808db39 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -357,30 +357,31 @@ def _write_to_row(self, row1=None, row2=None, row3=None, row4=None): return cell1, cell2, cell3, cell4 def test_mutate_rows(self): - row_1 = self._table.row(ROW_KEY) - row_1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1) - row_1.commit() - row_2 = self._table.row(ROW_KEY_ALT) - row_2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL2) - row_2.commit() - rows = [row_1, row_2] - self.rows_to_delete.extend(rows) + row1 = self._table.row(ROW_KEY) + row1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1) + row1.commit() + self.rows_to_delete.append(row1) + row2 = self._table.row(ROW_KEY_ALT) + row2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL2) + row2.commit() + self.rows_to_delete.append(row2) # Change the contents - row_1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL3) - row_2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL4) + row1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL3) + row2.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL4) + rows = [row1, row2] statuses = self._table.mutate_rows(rows) result = [status.code for status in statuses] expected_result = [0, 0] self.assertEqual(result, expected_result) # Check the contents - row_1_data = self._table.read_row(ROW_KEY) + row1_data = self._table.read_row(ROW_KEY) self.assertEqual( - row_1_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL3) - row_2_data = self._table.read_row(ROW_KEY_ALT) + row1_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL3) + row2_data = self._table.read_row(ROW_KEY_ALT) self.assertEqual( - row_2_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL4) + row2_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL4) def test_read_large_cell_limit(self): row = self._table.row(ROW_KEY) diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index be81fc9e2a8c..5867e76aff73 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -15,6 +15,8 @@ import unittest +import mock + class Test___mutate_rows_request(unittest.TestCase): @@ -23,24 +25,51 @@ def _call_fut(self, table_name, rows): return _mutate_rows_request(table_name, rows) + @mock.patch('google.cloud.bigtable.table._MAX_BULK_MUTATIONS', new=3) def 
test__mutate_rows_too_many_mutations(self): - from collections import namedtuple - from google.cloud.bigtable._generated.data_pb2 import Mutation from google.cloud.bigtable.row import DirectRow - import google.cloud.bigtable.table as table_module from google.cloud.bigtable.table import TooManyMutationsError - table = namedtuple('Table', ['name']) + table = mock.Mock(name='table', spec=['name']) table.name = 'table' - table_module._MAX_BULK_MUTATIONS = 3 - mutation = Mutation() rows = [DirectRow(row_key=b'row_key', table=table), DirectRow(row_key=b'row_key_2', table=table)] - rows[0]._pb_mutations = [mutation, mutation] - rows[1]._pb_mutations = [mutation, mutation] + rows[0].set_cell('cf1', b'c1', 1) + rows[0].set_cell('cf1', b'c1', 2) + rows[1].set_cell('cf1', b'c1', 3) + rows[1].set_cell('cf1', b'c1', 4) with self.assertRaises(TooManyMutationsError): self._call_fut('table', rows) + def test__mutate_rows_request(self): + from google.cloud.bigtable.row import DirectRow + + table = mock.Mock(name='table', spec=['name']) + table.name = 'table' + rows = [DirectRow(row_key=b'row_key', table=table), + DirectRow(row_key=b'row_key_2', table=table)] + rows[0].set_cell('cf1', b'c1', b'1') + rows[1].set_cell('cf1', b'c1', b'2') + result = self._call_fut('table', rows) + + expected_result = _mutate_rows_request_pb(table_name='table') + entry1 = expected_result.entries.add() + entry1.row_key = b'row_key' + mutations1 = entry1.mutations.add() + mutations1.set_cell.family_name = 'cf1' + mutations1.set_cell.column_qualifier = b'c1' + mutations1.set_cell.timestamp_micros = -1 + mutations1.set_cell.value = b'1' + entry2 = expected_result.entries.add() + entry2.row_key = b'row_key_2' + mutations2 = entry2.mutations.add() + mutations2.set_cell.family_name = 'cf1' + mutations2.set_cell.column_qualifier = b'c1' + mutations2.set_cell.timestamp_micros = -1 + mutations2.set_cell.value = b'2' + + self.assertEqual(result, expected_result) + class Test__check_row_table_name(unittest.TestCase): @@ -49,30 +78,45 @@ def _call_fut(self, table_name, row): return _check_row_table_name(table_name, row) - def test__check_rows_wrong_table_name(self): - from collections import namedtuple + def test_wrong_table_name(self): from google.cloud.bigtable.table import TableMismatchError from google.cloud.bigtable.row import DirectRow - table = namedtuple('Table', ['name']) + table = mock.Mock(name='table', spec=['name']) table.name = 'table' row = DirectRow(row_key=b'row_key', table=table) with self.assertRaises(TableMismatchError): self._call_fut('other_table', row) + def test_right_table_name(self): + from google.cloud.bigtable.row import DirectRow + + table = mock.Mock(name='table', spec=['name']) + table.name = 'table' + row = DirectRow(row_key=b'row_key', table=table) + result = self._call_fut('table', row) + self.assertFalse(result) + class Test__check_row_type(unittest.TestCase): - def _call_fut(self, table_name, row): + def _call_fut(self, row): from google.cloud.bigtable.table import _check_row_type - return _check_row_type(table_name, row) + return _check_row_type(row) - def test__check_rows_wrong_row_type(self): + def test_test_wrong_row_type(self): from google.cloud.bigtable.row import ConditionalRow row = ConditionalRow(row_key=b'row_key', table='table', filter_=None) with self.assertRaises(TypeError): - self._call_fut('table', row) + self._call_fut(row) + + def test_right_row_type(self): + from google.cloud.bigtable.row import DirectRow + + row = DirectRow(row_key=b'row_key', table='table') + result = 
self._call_fut(row) + self.assertFalse(result) class TestTable(unittest.TestCase): @@ -411,6 +455,7 @@ def test_mutate_rows(self): from google.cloud.bigtable._generated.bigtable_pb2 import ( MutateRowsResponse) from google.cloud.bigtable.row import DirectRow + from google.rpc.status_pb2 import Status from tests.unit._testing import _FakeStub client = _Client() @@ -422,13 +467,18 @@ def test_mutate_rows(self): row_2 = DirectRow(row_key=b'row_key_2', table=table) row_2.set_cell('cf', b'col', b'value2') - response = MutateRowsResponse() - entry_1 = response.entries.add() - entry_1.index = 0 - entry_1.status.code = 0 - entry_2 = response.entries.add() - entry_2.index = 1 - entry_2.status.code = 1 + response = MutateRowsResponse( + entries=[ + MutateRowsResponse.Entry( + index=0, + status=Status(code=0), + ), + MutateRowsResponse.Entry( + index=1, + status=Status(code=1), + ), + ], + ) # Patch the stub used by the API method. client._data_stub = _FakeStub([response]) @@ -667,6 +717,7 @@ def _mutate_rows_request_pb(*args, **kw): return data_messages_v2_pb2.MutateRowsRequest(*args, **kw) + def _TablePB(*args, **kw): from google.cloud.bigtable._generated import ( table_pb2 as table_v2_pb2) From d70ef85ec82d1e8468fb97ac66c80123b7391c12 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Fri, 9 Jun 2017 16:07:22 +0100 Subject: [PATCH 10/12] Fix lint warnings (string too long) --- bigtable/google/cloud/bigtable/table.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index d2e13fddee74..1c97fbdf8448 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -304,8 +304,8 @@ def mutate_rows(self, rows): :rtype: list :returns: A list of response statuses (`google.rpc.status_pb2.Status`) - corresponding to success or failure of each row mutation sent. - These will be in the same order as the `rows`. + corresponding to success or failure of each row mutation + sent. These will be in the same order as the `rows`. """ mutate_rows_request = _mutate_rows_request(self.name, rows) client = self._instance._client @@ -440,8 +440,8 @@ def _mutate_rows_request(table_name, rows): _check_row_type(row) entry = request_pb.entries.add() entry.row_key = row.row_key - # NOTE: Since `_check_row_type` has verified `row` is a `DirectRow`, the - # mutations have no state. + # NOTE: Since `_check_row_type` has verified `row` is a `DirectRow`, + # the mutations have no state. for mutation in row._get_mutations(None): mutations_count += 1 entry.mutations.add().CopyFrom(mutation) From b31a08b7d423c2e80f42ba39853e8cabde407fe5 Mon Sep 17 00:00:00 2001 From: Dmitry Timofeev Date: Fri, 9 Jun 2017 16:16:51 +0100 Subject: [PATCH 11/12] Link to the BT docs with limit specification. 
---
 bigtable/google/cloud/bigtable/table.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py
index 1c97fbdf8448..5dd7a887d1b0 100644
--- a/bigtable/google/cloud/bigtable/table.py
+++ b/bigtable/google/cloud/bigtable/table.py
@@ -32,7 +32,8 @@ from google.cloud.bigtable.row_data import PartialRowsData
-# Maximum number of mutations in bulk
+# Maximum number of mutations in bulk (MutateRowsRequest message):
+# https://cloud.google.com/bigtable/docs/reference/data/rpc/google.bigtable.v2
 _MAX_BULK_MUTATIONS = 100000


From ce995cf49f9cab9fd116f6de0cdcee9cf8af2552 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Fri, 9 Jun 2017 12:35:11 -0400
Subject: [PATCH 12/12] Add anchor to the method on the RPC docs page.

---
 bigtable/google/cloud/bigtable/table.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py
index 5dd7a887d1b0..8dbf8c1ce6fb 100644
--- a/bigtable/google/cloud/bigtable/table.py
+++ b/bigtable/google/cloud/bigtable/table.py
@@ -33,7 +33,7 @@
 # Maximum number of mutations in bulk (MutateRowsRequest message):
-# https://cloud.google.com/bigtable/docs/reference/data/rpc/google.bigtable.v2
+# https://cloud.google.com/bigtable/docs/reference/data/rpc/google.bigtable.v2#google.bigtable.v2.MutateRowRequest
 _MAX_BULK_MUTATIONS = 100000
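
The patches above only show the library-side changes; for reference, below is a minimal, hypothetical usage sketch of the Table.mutate_rows() API this series introduces. The project, instance, table, and column-family identifiers ('my-project', 'my-instance', 'my-table', 'cf1') are placeholders rather than values taken from the patches, and the column family is assumed to already exist on the table.

# Hedged usage sketch (not part of the patches above). Assumes a reachable
# Bigtable instance and an existing column family named 'cf1'.
from google.cloud import bigtable

client = bigtable.Client(project='my-project')
instance = client.instance('my-instance')
table = instance.table('my-table')

# Accumulate mutations locally on DirectRow objects; nothing is sent yet.
rows = []
for i in range(3):
    row = table.row('row-key-{}'.format(i))
    row.set_cell('cf1', b'greeting', 'hello {}'.format(i).encode('utf-8'))
    rows.append(row)

# mutate_rows() sends a single bulk MutateRows RPC and returns one
# google.rpc.status_pb2.Status per input row, in the same order as `rows`.
statuses = table.mutate_rows(rows)
for row, status in zip(rows, statuses):
    if status.code != 0:
        # Failed rows keep their pending mutations and can be retried
        # individually, for example with row.commit().
        print('Row {} failed: {}'.format(row.row_key, status.message))

Because successfully applied rows are cleared while failed rows keep their mutations, a caller can retry just the failures without rebuilding the whole batch.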