From 9e7a4e3fa3e7496b34930ae8eda6dff6a08abd55 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 13:38:16 -0400 Subject: [PATCH 01/16] tests: refactor 'app_profile' unit tests w/ pytest idioms --- tests/unit/test_app_profile.py | 1236 ++++++++++++++++---------------- 1 file changed, 605 insertions(+), 631 deletions(-) diff --git a/tests/unit/test_app_profile.py b/tests/unit/test_app_profile.py index 6422e87e9..07c686fb8 100644 --- a/tests/unit/test_app_profile.py +++ b/tests/unit/test_app_profile.py @@ -12,650 +12,624 @@ # See the License for the specific language governing permissions and # limitations under the License. - -import unittest - import mock +import pytest from ._testing import _make_credentials +PROJECT = "project" +INSTANCE_ID = "instance-id" +APP_PROFILE_ID = "app-profile-id" +APP_PROFILE_NAME = "projects/{}/instances/{}/appProfiles/{}".format( + PROJECT, INSTANCE_ID, APP_PROFILE_ID +) +CLUSTER_ID = "cluster-id" +OP_ID = 8765 +OP_NAME = "operations/projects/{}/instances/{}/appProfiles/{}/operations/{}".format( + PROJECT, INSTANCE_ID, APP_PROFILE_ID, OP_ID +) + + +def _make_app_profile(*args, **kwargs): + from google.cloud.bigtable.app_profile import AppProfile + + return AppProfile(*args, **kwargs) + + +def _make_client(*args, **kwargs): + from google.cloud.bigtable.client import Client + + return Client(*args, **kwargs) + + +def test_app_profile_constructor_defaults(): + from google.cloud.bigtable.app_profile import AppProfile + + client = _Client(PROJECT) + instance = _Instance(INSTANCE_ID, client) + + app_profile = _make_app_profile(APP_PROFILE_ID, instance) + assert isinstance(app_profile, AppProfile) + assert app_profile._instance == instance + assert app_profile.routing_policy_type is None + assert app_profile.description is None + assert app_profile.cluster_id is None + assert app_profile.allow_transactional_writes is None + + +def test_app_profile_constructor_explicit(): + from google.cloud.bigtable.enums import RoutingPolicyType + + ANY = RoutingPolicyType.ANY + DESCRIPTION_1 = "routing policy any" + APP_PROFILE_ID_2 = "app-profile-id-2" + SINGLE = RoutingPolicyType.SINGLE + DESCRIPTION_2 = "routing policy single" + ALLOW_WRITES = True + client = _Client(PROJECT) + instance = _Instance(INSTANCE_ID, client) + + app_profile1 = _make_app_profile( + APP_PROFILE_ID, instance, routing_policy_type=ANY, description=DESCRIPTION_1, + ) + app_profile2 = _make_app_profile( + APP_PROFILE_ID_2, + instance, + routing_policy_type=SINGLE, + description=DESCRIPTION_2, + cluster_id=CLUSTER_ID, + allow_transactional_writes=ALLOW_WRITES, + ) + assert app_profile1.app_profile_id == APP_PROFILE_ID + assert app_profile1._instance is instance + assert app_profile1.routing_policy_type == ANY + assert app_profile1.description == DESCRIPTION_1 + assert app_profile2.app_profile_id == APP_PROFILE_ID_2 + assert app_profile2._instance is instance + assert app_profile2.routing_policy_type == SINGLE + assert app_profile2.description == DESCRIPTION_2 + assert app_profile2.cluster_id == CLUSTER_ID + assert app_profile2.allow_transactional_writes == ALLOW_WRITES + + +def test_app_profile_name(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = _Instance(INSTANCE_ID, client) + + app_profile = _make_app_profile(APP_PROFILE_ID, instance) + assert app_profile.name == APP_PROFILE_NAME + + +def test_app_profile___eq__(): + client = _Client(PROJECT) + instance = _Instance(INSTANCE_ID, client) + app_profile1 = 
_make_app_profile(APP_PROFILE_ID, instance)
+    app_profile2 = _make_app_profile(APP_PROFILE_ID, instance)
+    assert app_profile1 == app_profile2
+
+
+def test_app_profile___eq___w_type_instance_differ():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    alt_instance = _Instance("other-instance", client)
+    other_object = _Other(APP_PROFILE_ID, instance)
+    app_profile1 = _make_app_profile(APP_PROFILE_ID, instance)
+    app_profile2 = _make_app_profile(APP_PROFILE_ID, alt_instance)
+    assert not (app_profile1 == other_object)
+    assert not (app_profile1 == app_profile2)
+
+
+def test_app_profile___ne___w_same_value():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    app_profile1 = _make_app_profile(APP_PROFILE_ID, instance)
+    app_profile2 = _make_app_profile(APP_PROFILE_ID, instance)
+    assert not (app_profile1 != app_profile2)
+
+
+def test_app_profile___ne__():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    app_profile1 = _make_app_profile("app_profile_id1", instance)
+    app_profile2 = _make_app_profile("app_profile_id2", instance)
+    assert app_profile1 != app_profile2
+
+
+def test_app_profile_from_pb_success_w_routing_any():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.app_profile import AppProfile
+    from google.cloud.bigtable.enums import RoutingPolicyType
+
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+
+    description = "routing any"
+    routing = RoutingPolicyType.ANY
+    multi_cluster_routing_use_any = data_v2_pb2.AppProfile.MultiClusterRoutingUseAny()
+
+    app_profile_pb = data_v2_pb2.AppProfile(
+        name=APP_PROFILE_NAME,
+        description=description,
+        multi_cluster_routing_use_any=multi_cluster_routing_use_any,
+    )
+
+    app_profile = AppProfile.from_pb(app_profile_pb, instance)
+    assert isinstance(app_profile, AppProfile)
+    assert app_profile._instance is instance
+    assert app_profile.app_profile_id == APP_PROFILE_ID
+    assert app_profile.description == description
+    assert app_profile.routing_policy_type == routing
+    assert app_profile.cluster_id is None
+    assert app_profile.allow_transactional_writes is False
+
+
+def test_app_profile_from_pb_success_w_routing_single():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.app_profile import AppProfile
+    from google.cloud.bigtable.enums import RoutingPolicyType
+
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+
+    description = "routing single"
+    allow_transactional_writes = True
+    routing = RoutingPolicyType.SINGLE
+    single_cluster_routing = data_v2_pb2.AppProfile.SingleClusterRouting(
+        cluster_id=CLUSTER_ID, allow_transactional_writes=allow_transactional_writes,
+    )
+
+    app_profile_pb = data_v2_pb2.AppProfile(
+        name=APP_PROFILE_NAME,
+        description=description,
+        single_cluster_routing=single_cluster_routing,
+    )
+
+    app_profile = AppProfile.from_pb(app_profile_pb, instance)
+    assert isinstance(app_profile, AppProfile)
+    assert app_profile._instance is instance
+    assert app_profile.app_profile_id == APP_PROFILE_ID
+    assert app_profile.description == description
+    assert app_profile.routing_policy_type == routing
+    assert app_profile.cluster_id == CLUSTER_ID
+    assert app_profile.allow_transactional_writes == allow_transactional_writes
+
+
+def test_app_profile_from_pb_w_bad_app_profile_name():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.app_profile import 
AppProfile
+
+    bad_app_profile_name = "BAD_NAME"
+
+    app_profile_pb = data_v2_pb2.AppProfile(name=bad_app_profile_name)
+
+    with pytest.raises(ValueError):
+        AppProfile.from_pb(app_profile_pb, None)
+
+
+def test_app_profile_from_pb_w_instance_id_mismatch():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.app_profile import AppProfile
+
+    ALT_INSTANCE_ID = "ALT_INSTANCE_ID"
+    client = _Client(PROJECT)
+    instance = _Instance(ALT_INSTANCE_ID, client)
+    assert instance.instance_id == ALT_INSTANCE_ID
+
+    app_profile_pb = data_v2_pb2.AppProfile(name=APP_PROFILE_NAME)
+
+    with pytest.raises(ValueError):
+        AppProfile.from_pb(app_profile_pb, instance)
+
+
+def test_app_profile_from_pb_w_project_mismatch():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.app_profile import AppProfile
+
+    ALT_PROJECT = "ALT_PROJECT"
+    client = _Client(project=ALT_PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    assert client.project == ALT_PROJECT
+
+    app_profile_pb = data_v2_pb2.AppProfile(name=APP_PROFILE_NAME)
+
+    with pytest.raises(ValueError):
+        AppProfile.from_pb(app_profile_pb, instance)
+
+
+def test_app_profile_reload_w_routing_any():
+    from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
+        BigtableInstanceAdminClient,
+    )
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.enums import RoutingPolicyType
+
+    api = mock.create_autospec(BigtableInstanceAdminClient)
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _Instance(INSTANCE_ID, client)
+
+    routing = RoutingPolicyType.ANY
+    description = "routing policy any"
+
+    app_profile = _make_app_profile(
+        APP_PROFILE_ID, instance, routing_policy_type=routing, description=description,
+    )
+
+    # Create response_pb
+    description_from_server = "routing policy switched to single"
+    cluster_id_from_server = CLUSTER_ID
+    allow_transactional_writes = True
+    single_cluster_routing = data_v2_pb2.AppProfile.SingleClusterRouting(
+        cluster_id=cluster_id_from_server,
+        allow_transactional_writes=allow_transactional_writes,
+    )
+
+    response_pb = data_v2_pb2.AppProfile(
+        name=app_profile.name,
+        single_cluster_routing=single_cluster_routing,
+        description=description_from_server,
+    )
+
+    # Patch the stub used by the API method.
+    client._instance_admin_client = api
+    instance_stub = client._instance_admin_client
+    instance_stub.get_app_profile.side_effect = [response_pb]
+
+    # Create expected_result.
+    expected_result = None  # reload() has no return value. 
-class TestAppProfile(unittest.TestCase): - - PROJECT = "project" - INSTANCE_ID = "instance-id" - APP_PROFILE_ID = "app-profile-id" - APP_PROFILE_NAME = "projects/{}/instances/{}/appProfiles/{}".format( - PROJECT, INSTANCE_ID, APP_PROFILE_ID - ) - CLUSTER_ID = "cluster-id" - OP_ID = 8765 - OP_NAME = "operations/projects/{}/instances/{}/appProfiles/{}/operations/{}".format( - PROJECT, INSTANCE_ID, APP_PROFILE_ID, OP_ID - ) - - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.app_profile import AppProfile - - return AppProfile - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client - - return Client - - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - - def test_constructor_defaults(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - app_profile = self._make_one(self.APP_PROFILE_ID, instance) - self.assertIsInstance(app_profile, self._get_target_class()) - self.assertEqual(app_profile._instance, instance) - self.assertIsNone(app_profile.routing_policy_type) - self.assertIsNone(app_profile.description) - self.assertIsNone(app_profile.cluster_id) - self.assertIsNone(app_profile.allow_transactional_writes) - - def test_constructor_non_defaults(self): - from google.cloud.bigtable.enums import RoutingPolicyType - - ANY = RoutingPolicyType.ANY - DESCRIPTION_1 = "routing policy any" - APP_PROFILE_ID_2 = "app-profile-id-2" - SINGLE = RoutingPolicyType.SINGLE - DESCRIPTION_2 = "routing policy single" - ALLOW_WRITES = True - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - app_profile1 = self._make_one( - self.APP_PROFILE_ID, - instance, - routing_policy_type=ANY, - description=DESCRIPTION_1, - ) - app_profile2 = self._make_one( - APP_PROFILE_ID_2, - instance, - routing_policy_type=SINGLE, - description=DESCRIPTION_2, - cluster_id=self.CLUSTER_ID, - allow_transactional_writes=ALLOW_WRITES, - ) - self.assertEqual(app_profile1.app_profile_id, self.APP_PROFILE_ID) - self.assertIs(app_profile1._instance, instance) - self.assertEqual(app_profile1.routing_policy_type, ANY) - self.assertEqual(app_profile1.description, DESCRIPTION_1) - self.assertEqual(app_profile2.app_profile_id, APP_PROFILE_ID_2) - self.assertIs(app_profile2._instance, instance) - self.assertEqual(app_profile2.routing_policy_type, SINGLE) - self.assertEqual(app_profile2.description, DESCRIPTION_2) - self.assertEqual(app_profile2.cluster_id, self.CLUSTER_ID) - self.assertEqual(app_profile2.allow_transactional_writes, ALLOW_WRITES) - - def test_name_property(self): - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = _Instance(self.INSTANCE_ID, client) - - app_profile = self._make_one(self.APP_PROFILE_ID, instance) - self.assertEqual(app_profile.name, self.APP_PROFILE_NAME) - - def test___eq__(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - app_profile1 = self._make_one(self.APP_PROFILE_ID, instance) - app_profile2 = self._make_one(self.APP_PROFILE_ID, instance) - self.assertTrue(app_profile1 == app_profile2) - - def test___eq__type_instance_differ(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - alt_instance = _Instance("other-instance", client) - other_object = 
_Other(self.APP_PROFILE_ID, instance) - app_profile1 = self._make_one(self.APP_PROFILE_ID, instance) - app_profile2 = self._make_one(self.APP_PROFILE_ID, alt_instance) - self.assertFalse(app_profile1 == other_object) - self.assertFalse(app_profile1 == app_profile2) - - def test___ne__same_value(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - app_profile1 = self._make_one(self.APP_PROFILE_ID, instance) - app_profile2 = self._make_one(self.APP_PROFILE_ID, instance) - self.assertFalse(app_profile1 != app_profile2) - - def test___ne__(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - app_profile1 = self._make_one("app_profile_id1", instance) - app_profile2 = self._make_one("app_profile_id2", instance) - self.assertTrue(app_profile1 != app_profile2) - - def test_from_pb_success_routing_any(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.enums import RoutingPolicyType - - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - desctiption = "routing any" - routing = RoutingPolicyType.ANY - multi_cluster_routing_use_any = ( - data_v2_pb2.AppProfile.MultiClusterRoutingUseAny() - ) - - app_profile_pb = data_v2_pb2.AppProfile( - name=self.APP_PROFILE_NAME, - description=desctiption, - multi_cluster_routing_use_any=multi_cluster_routing_use_any, - ) - - klass = self._get_target_class() - app_profile = klass.from_pb(app_profile_pb, instance) - self.assertIsInstance(app_profile, klass) - self.assertIs(app_profile._instance, instance) - self.assertEqual(app_profile.app_profile_id, self.APP_PROFILE_ID) - self.assertEqual(app_profile.description, desctiption) - self.assertEqual(app_profile.routing_policy_type, routing) - self.assertIsNone(app_profile.cluster_id) - self.assertEqual(app_profile.allow_transactional_writes, False) - - def test_from_pb_success_routing_single(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.enums import RoutingPolicyType - - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - desctiption = "routing single" - allow_transactional_writes = True - routing = RoutingPolicyType.SINGLE - single_cluster_routing = data_v2_pb2.AppProfile.SingleClusterRouting( - cluster_id=self.CLUSTER_ID, - allow_transactional_writes=allow_transactional_writes, - ) - - app_profile_pb = data_v2_pb2.AppProfile( - name=self.APP_PROFILE_NAME, - description=desctiption, - single_cluster_routing=single_cluster_routing, - ) - - klass = self._get_target_class() - app_profile = klass.from_pb(app_profile_pb, instance) - self.assertIsInstance(app_profile, klass) - self.assertIs(app_profile._instance, instance) - self.assertEqual(app_profile.app_profile_id, self.APP_PROFILE_ID) - self.assertEqual(app_profile.description, desctiption) - self.assertEqual(app_profile.routing_policy_type, routing) - self.assertEqual(app_profile.cluster_id, self.CLUSTER_ID) - self.assertEqual( - app_profile.allow_transactional_writes, allow_transactional_writes - ) - - def test_from_pb_bad_app_profile_name(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - - bad_app_profile_name = "BAD_NAME" - - app_profile_pb = data_v2_pb2.AppProfile(name=bad_app_profile_name) - - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_pb(app_profile_pb, None) - - def test_from_pb_instance_id_mistmatch(self): - from 
google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - - ALT_INSTANCE_ID = "ALT_INSTANCE_ID" - client = _Client(self.PROJECT) - instance = _Instance(ALT_INSTANCE_ID, client) - self.assertEqual(instance.instance_id, ALT_INSTANCE_ID) - - app_profile_pb = data_v2_pb2.AppProfile(name=self.APP_PROFILE_NAME) - - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_pb(app_profile_pb, instance) - - def test_from_pb_project_mistmatch(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - - ALT_PROJECT = "ALT_PROJECT" - client = _Client(project=ALT_PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - self.assertEqual(client.project, ALT_PROJECT) - - app_profile_pb = data_v2_pb2.AppProfile(name=self.APP_PROFILE_NAME) - - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_pb(app_profile_pb, instance) - - def test_reload_routing_any(self): - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.enums import RoutingPolicyType - - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = _Instance(self.INSTANCE_ID, client) - - routing = RoutingPolicyType.ANY - description = "routing policy any" - - app_profile = self._make_one( - self.APP_PROFILE_ID, - instance, - routing_policy_type=routing, - description=description, - ) - - # Create response_pb - description_from_server = "routing policy switched to single" - cluster_id_from_server = self.CLUSTER_ID - allow_transactional_writes = True - single_cluster_routing = data_v2_pb2.AppProfile.SingleClusterRouting( - cluster_id=cluster_id_from_server, - allow_transactional_writes=allow_transactional_writes, - ) - - response_pb = data_v2_pb2.AppProfile( - name=app_profile.name, - single_cluster_routing=single_cluster_routing, - description=description_from_server, - ) - - # Patch the stub used by the API method. - client._instance_admin_client = api - instance_stub = client._instance_admin_client - instance_stub.get_app_profile.side_effect = [response_pb] - - # Create expected_result. - expected_result = None # reload() has no return value. - - # Check app_profile config values before. - self.assertEqual(app_profile.routing_policy_type, routing) - self.assertEqual(app_profile.description, description) - self.assertIsNone(app_profile.cluster_id) - self.assertIsNone(app_profile.allow_transactional_writes) - - # Perform the method and check the result. 
- result = app_profile.reload() - self.assertEqual(result, expected_result) - self.assertEqual(app_profile.routing_policy_type, RoutingPolicyType.SINGLE) - self.assertEqual(app_profile.description, description_from_server) - self.assertEqual(app_profile.cluster_id, cluster_id_from_server) - self.assertEqual( - app_profile.allow_transactional_writes, allow_transactional_writes - ) - - def test_exists(self): - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.api_core import exceptions - - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - - # Create response_pb - response_pb = data_v2_pb2.AppProfile(name=self.APP_PROFILE_NAME) - client._instance_admin_client = instance_api - - # Patch the stub used by the API method. - client._instance_admin_client = instance_api - instance_stub = client._instance_admin_client - instance_stub.get_app_profile.side_effect = [ - response_pb, - exceptions.NotFound("testing"), - exceptions.BadRequest("testing"), - ] - - # Perform the method and check the result. - non_existing_app_profile_id = "other-app-profile-id" - app_profile = self._make_one(self.APP_PROFILE_ID, instance) - alt_app_profile = self._make_one(non_existing_app_profile_id, instance) - self.assertTrue(app_profile.exists()) - self.assertFalse(alt_app_profile.exists()) - with self.assertRaises(exceptions.BadRequest): - alt_app_profile.exists() - - def test_create_routing_any(self): - from google.cloud.bigtable.enums import RoutingPolicyType - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - - routing = RoutingPolicyType.ANY - description = "routing policy any" - ignore_warnings = True - - app_profile = self._make_one( - self.APP_PROFILE_ID, - instance, - routing_policy_type=routing, - description=description, - ) - - expected_request_app_profile = app_profile._to_pb() - name = instance.name - expected_request = { - "request": { - "parent": name, - "app_profile_id": self.APP_PROFILE_ID, - "app_profile": expected_request_app_profile, - "ignore_warnings": ignore_warnings, - } + # Check app_profile config values before. + assert app_profile.routing_policy_type == routing + assert app_profile.description == description + assert app_profile.cluster_id is None + assert app_profile.allow_transactional_writes is None + + # Perform the method and check the result. 
+ result = app_profile.reload() + assert result == expected_result + assert app_profile.routing_policy_type == RoutingPolicyType.SINGLE + assert app_profile.description == description_from_server + assert app_profile.cluster_id == cluster_id_from_server + assert app_profile.allow_transactional_writes == allow_transactional_writes + + +def test_app_profile_exists(): + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 + from google.api_core import exceptions + + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + + # Create response_pb + response_pb = data_v2_pb2.AppProfile(name=APP_PROFILE_NAME) + client._instance_admin_client = instance_api + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + instance_stub = client._instance_admin_client + instance_stub.get_app_profile.side_effect = [ + response_pb, + exceptions.NotFound("testing"), + exceptions.BadRequest("testing"), + ] + + # Perform the method and check the result. + non_existing_app_profile_id = "other-app-profile-id" + app_profile = _make_app_profile(APP_PROFILE_ID, instance) + alt_app_profile = _make_app_profile(non_existing_app_profile_id, instance) + assert app_profile.exists() + assert not alt_app_profile.exists() + with pytest.raises(exceptions.BadRequest): + alt_app_profile.exists() + + +def test_app_profile_create_w_routing_any(): + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable.enums import RoutingPolicyType + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + + routing = RoutingPolicyType.ANY + description = "routing policy any" + ignore_warnings = True + + app_profile = _make_app_profile( + APP_PROFILE_ID, instance, routing_policy_type=routing, description=description, + ) + + expected_request_app_profile = app_profile._to_pb() + name = instance.name + expected_request = { + "request": { + "parent": name, + "app_profile_id": APP_PROFILE_ID, + "app_profile": expected_request_app_profile, + "ignore_warnings": ignore_warnings, } + } - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - instance_api.app_profile_path.return_value = ( - "projects/project/instances/instance-id/appProfiles/app-profile-id" - ) - instance_api.instance_path.return_value = name - instance_api.create_app_profile.return_value = expected_request_app_profile - - # Patch the stub used by the API method. - client._instance_admin_client = instance_api - app_profile._instance._client._instance_admin_client = instance_api - # Perform the method and check the result. 
- result = app_profile.create(ignore_warnings) - - actual_request = client._instance_admin_client.create_app_profile.call_args_list[ - 0 - ].kwargs - - self.assertEqual(actual_request, expected_request) - self.assertIsInstance(result, self._get_target_class()) - self.assertEqual(result.app_profile_id, self.APP_PROFILE_ID) - self.assertIs(result._instance, instance) - self.assertEqual(result.routing_policy_type, routing) - self.assertEqual(result.description, description) - self.assertEqual(result.allow_transactional_writes, False) - self.assertIsNone(result.cluster_id) - - def test_create_routing_single(self): - from google.cloud.bigtable.enums import RoutingPolicyType - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - - routing = RoutingPolicyType.SINGLE - description = "routing policy single" - allow_writes = False - ignore_warnings = True - - app_profile = self._make_one( - self.APP_PROFILE_ID, - instance, - routing_policy_type=routing, - description=description, - cluster_id=self.CLUSTER_ID, - allow_transactional_writes=allow_writes, - ) - expected_request_app_profile = app_profile._to_pb() - instance_name = instance.name - expected_request = { - "request": { - "parent": instance_name, - "app_profile_id": self.APP_PROFILE_ID, - "app_profile": expected_request_app_profile, - "ignore_warnings": ignore_warnings, - } + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + instance_api.app_profile_path.return_value = ( + "projects/project/instances/instance-id/appProfiles/app-profile-id" + ) + instance_api.instance_path.return_value = name + instance_api.create_app_profile.return_value = expected_request_app_profile + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + app_profile._instance._client._instance_admin_client = instance_api + # Perform the method and check the result. 
+ result = app_profile.create(ignore_warnings) + + actual_request = client._instance_admin_client.create_app_profile.call_args_list[ + 0 + ].kwargs + + assert actual_request == expected_request + assert isinstance(result, AppProfile) + assert result.app_profile_id == APP_PROFILE_ID + assert result._instance is instance + assert result.routing_policy_type == routing + assert result.description == description + assert result.allow_transactional_writes is False + assert result.cluster_id is None + + +def test_app_profile_create_w_routing_single(): + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable.enums import RoutingPolicyType + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + + routing = RoutingPolicyType.SINGLE + description = "routing policy single" + allow_writes = False + ignore_warnings = True + + app_profile = _make_app_profile( + APP_PROFILE_ID, + instance, + routing_policy_type=routing, + description=description, + cluster_id=CLUSTER_ID, + allow_transactional_writes=allow_writes, + ) + expected_request_app_profile = app_profile._to_pb() + instance_name = instance.name + expected_request = { + "request": { + "parent": instance_name, + "app_profile_id": APP_PROFILE_ID, + "app_profile": expected_request_app_profile, + "ignore_warnings": ignore_warnings, } + } + + # Patch the stub used by the API method. + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + instance_api.app_profile_path.return_value = ( + "projects/project/instances/instance-id/appProfiles/app-profile-id" + ) + instance_api.instance_path.return_value = instance_name + instance_api.create_app_profile.return_value = expected_request_app_profile + client._instance_admin_client = instance_api + # Perform the method and check the result. 
+ result = app_profile.create(ignore_warnings) + + actual_request = client._instance_admin_client.create_app_profile.call_args_list[ + 0 + ].kwargs + + assert actual_request == expected_request + assert isinstance(result, AppProfile) + assert result.app_profile_id == APP_PROFILE_ID + assert result._instance is instance + assert result.routing_policy_type == routing + assert result.description == description + assert result.allow_transactional_writes == allow_writes + assert result.cluster_id == CLUSTER_ID + + +def test_app_profile_create_w_wrong_routing_policy(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + app_profile = _make_app_profile(APP_PROFILE_ID, instance, routing_policy_type=None) + with pytest.raises(ValueError): + app_profile.create() + + +def test_app_profile_update_w_routing_any(): + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable.enums import RoutingPolicyType + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.protobuf import field_mask_pb2 + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + + routing = RoutingPolicyType.SINGLE + description = "to routing policy single" + allow_writes = True + app_profile = _make_app_profile( + APP_PROFILE_ID, + instance, + routing_policy_type=routing, + description=description, + cluster_id=CLUSTER_ID, + allow_transactional_writes=allow_writes, + ) + + # Create response_pb + metadata = messages_v2_pb2.UpdateAppProfileMetadata() + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.UpdateAppProfileMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + # Patch the stub used by the API method. + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + # Mock api calls + instance_api.app_profile_path.return_value = ( + "projects/project/instances/instance-id/appProfiles/app-profile-id" + ) + + client._instance_admin_client = instance_api + + # Perform the method and check the result. + ignore_warnings = True + expected_request_update_mask = field_mask_pb2.FieldMask( + paths=["description", "single_cluster_routing"] + ) - # Patch the stub used by the API method. - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - instance_api.app_profile_path.return_value = ( - "projects/project/instances/instance-id/appProfiles/app-profile-id" - ) - instance_api.instance_path.return_value = instance_name - instance_api.create_app_profile.return_value = expected_request_app_profile - client._instance_admin_client = instance_api - # Perform the method and check the result. 
- result = app_profile.create(ignore_warnings) - - actual_request = client._instance_admin_client.create_app_profile.call_args_list[ - 0 - ].kwargs - - self.assertEqual(actual_request, expected_request) - self.assertIsInstance(result, self._get_target_class()) - self.assertEqual(result.app_profile_id, self.APP_PROFILE_ID) - self.assertIs(result._instance, instance) - self.assertEqual(result.routing_policy_type, routing) - self.assertEqual(result.description, description) - self.assertEqual(result.allow_transactional_writes, allow_writes) - self.assertEqual(result.cluster_id, self.CLUSTER_ID) - - def test_create_app_profile_with_wrong_routing_policy(self): - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - app_profile = self._make_one( - self.APP_PROFILE_ID, instance, routing_policy_type=None - ) - with self.assertRaises(ValueError): - app_profile.create() - - def test_update_app_profile_routing_any(self): - from google.longrunning import operations_pb2 - from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud.bigtable.enums import RoutingPolicyType - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.protobuf import field_mask_pb2 - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - - routing = RoutingPolicyType.SINGLE - description = "to routing policy single" - allow_writes = True - app_profile = self._make_one( - self.APP_PROFILE_ID, - instance, - routing_policy_type=routing, - description=description, - cluster_id=self.CLUSTER_ID, - allow_transactional_writes=allow_writes, - ) - - # Create response_pb - metadata = messages_v2_pb2.UpdateAppProfileMetadata() - type_url = "type.googleapis.com/{}".format( - messages_v2_pb2.UpdateAppProfileMetadata._meta._pb.DESCRIPTOR.full_name - ) - response_pb = operations_pb2.Operation( - name=self.OP_NAME, - metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), - ) - - # Patch the stub used by the API method. - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - # Mock api calls - instance_api.app_profile_path.return_value = ( - "projects/project/instances/instance-id/appProfiles/app-profile-id" - ) - - client._instance_admin_client = instance_api - - # Perform the method and check the result. 
- ignore_warnings = True - expected_request_update_mask = field_mask_pb2.FieldMask( - paths=["description", "single_cluster_routing"] - ) - - expected_request = { - "request": { - "app_profile": app_profile._to_pb(), - "update_mask": expected_request_update_mask, - "ignore_warnings": ignore_warnings, - } + expected_request = { + "request": { + "app_profile": app_profile._to_pb(), + "update_mask": expected_request_update_mask, + "ignore_warnings": ignore_warnings, } + } + + instance_api.update_app_profile.return_value = response_pb + app_profile._instance._client._instance_admin_client = instance_api + result = app_profile.update(ignore_warnings=ignore_warnings) + actual_request = client._instance_admin_client.update_app_profile.call_args_list[ + 0 + ].kwargs + + assert actual_request == expected_request + assert ( + result.metadata.type_url + == "type.googleapis.com/google.bigtable.admin.v2.UpdateAppProfileMetadata" + ) + - instance_api.update_app_profile.return_value = response_pb - app_profile._instance._client._instance_admin_client = instance_api - result = app_profile.update(ignore_warnings=ignore_warnings) - actual_request = client._instance_admin_client.update_app_profile.call_args_list[ - 0 - ].kwargs - - self.assertEqual(actual_request, expected_request) - self.assertEqual( - result.metadata.type_url, - "type.googleapis.com/google.bigtable.admin.v2.UpdateAppProfileMetadata", - ) - - def test_update_app_profile_routing_single(self): - from google.longrunning import operations_pb2 - from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud.bigtable.enums import RoutingPolicyType - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.protobuf import field_mask_pb2 - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - - routing = RoutingPolicyType.ANY - app_profile = self._make_one( - self.APP_PROFILE_ID, instance, routing_policy_type=routing - ) - - # Create response_pb - metadata = messages_v2_pb2.UpdateAppProfileMetadata() - type_url = "type.googleapis.com/{}".format( - messages_v2_pb2.UpdateAppProfileMetadata._meta._pb.DESCRIPTOR.full_name - ) - response_pb = operations_pb2.Operation( - name=self.OP_NAME, - metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), - ) - - # Patch the stub used by the API method. - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - # Mock api calls - instance_api.app_profile_path.return_value = ( - "projects/project/instances/instance-id/appProfiles/app-profile-id" - ) - client._instance_admin_client = instance_api - client._instance_admin_client.update_app_profile.return_value = response_pb - # Perform the method and check the result. 
- ignore_warnings = True - expected_request_update_mask = field_mask_pb2.FieldMask( - paths=["multi_cluster_routing_use_any"] - ) - expected_request = { - "request": { - "app_profile": app_profile._to_pb(), - "update_mask": expected_request_update_mask, - "ignore_warnings": ignore_warnings, - } +def test_app_profile_update_w_routing_single(): + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable.enums import RoutingPolicyType + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.protobuf import field_mask_pb2 + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + + routing = RoutingPolicyType.ANY + app_profile = _make_app_profile( + APP_PROFILE_ID, instance, routing_policy_type=routing + ) + + # Create response_pb + metadata = messages_v2_pb2.UpdateAppProfileMetadata() + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.UpdateAppProfileMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + # Patch the stub used by the API method. + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + # Mock api calls + instance_api.app_profile_path.return_value = ( + "projects/project/instances/instance-id/appProfiles/app-profile-id" + ) + client._instance_admin_client = instance_api + client._instance_admin_client.update_app_profile.return_value = response_pb + # Perform the method and check the result. + ignore_warnings = True + expected_request_update_mask = field_mask_pb2.FieldMask( + paths=["multi_cluster_routing_use_any"] + ) + expected_request = { + "request": { + "app_profile": app_profile._to_pb(), + "update_mask": expected_request_update_mask, + "ignore_warnings": ignore_warnings, } + } + + result = app_profile.update(ignore_warnings=ignore_warnings) + actual_request = client._instance_admin_client.update_app_profile.call_args_list[ + 0 + ].kwargs + assert actual_request == expected_request + assert ( + result.metadata.type_url + == "type.googleapis.com/google.bigtable.admin.v2.UpdateAppProfileMetadata" + ) + + +def test_app_profile_update_w_wrong_routing_policy(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + app_profile = _make_app_profile(APP_PROFILE_ID, instance, routing_policy_type=None) + with pytest.raises(ValueError): + app_profile.update() + + +def test_app_profile_delete(): + from google.protobuf import empty_pb2 + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = client.instance(INSTANCE_ID) + app_profile = _make_app_profile(APP_PROFILE_ID, instance) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. 
+ client._instance_admin_client = instance_api + instance_stub = client._instance_admin_client.transport + instance_stub.delete_cluster.side_effect = [response_pb] + + # Create expected_result. + expected_result = None # delete() has no return value. + + # Perform the method and check the result. + result = app_profile.delete() - result = app_profile.update(ignore_warnings=ignore_warnings) - actual_request = client._instance_admin_client.update_app_profile.call_args_list[ - 0 - ].kwargs - self.assertEqual(actual_request, expected_request) - self.assertEqual( - result.metadata.type_url, - "type.googleapis.com/google.bigtable.admin.v2.UpdateAppProfileMetadata", - ) - - def test_update_app_profile_with_wrong_routing_policy(self): - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - app_profile = self._make_one( - self.APP_PROFILE_ID, instance, routing_policy_type=None - ) - with self.assertRaises(ValueError): - app_profile.update() - - def test_delete(self): - from google.protobuf import empty_pb2 - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = client.instance(self.INSTANCE_ID) - app_profile = self._make_one(self.APP_PROFILE_ID, instance) - - # Create response_pb - response_pb = empty_pb2.Empty() - - # Patch the stub used by the API method. - client._instance_admin_client = instance_api - instance_stub = client._instance_admin_client.transport - instance_stub.delete_cluster.side_effect = [response_pb] - - # Create expected_result. - expected_result = None # delete() has no return value. - - # Perform the method and check the result. 
- result = app_profile.delete() - - self.assertEqual(result, expected_result) + assert result == expected_result class _Client(object): From 14fd9d95a0e2bebfee8b0ad0daa98a49fa0464cc Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 13:58:21 -0400 Subject: [PATCH 02/16] tests: refactor 'backup' unit tests w/ pytest idioms --- tests/unit/test_backup.py | 1702 +++++++++++++++++++------------------ 1 file changed, 857 insertions(+), 845 deletions(-) diff --git a/tests/unit/test_backup.py b/tests/unit/test_backup.py index a32e18adb..92e9d7307 100644 --- a/tests/unit/test_backup.py +++ b/tests/unit/test_backup.py @@ -14,881 +14,893 @@ import datetime + import mock -import unittest +import pytest from ._testing import _make_credentials from google.cloud._helpers import UTC +PROJECT_ID = "project-id" +INSTANCE_ID = "instance-id" +INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID +CLUSTER_ID = "cluster-id" +CLUSTER_NAME = INSTANCE_NAME + "/clusters/" + CLUSTER_ID +TABLE_ID = "table-id" +TABLE_NAME = INSTANCE_NAME + "/tables/" + TABLE_ID +BACKUP_ID = "backup-id" +BACKUP_NAME = CLUSTER_NAME + "/backups/" + BACKUP_ID + +ALT_INSTANCE = "other-instance-id" +ALT_INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + ALT_INSTANCE +ALT_CLUSTER_NAME = ALT_INSTANCE_NAME + "/clusters/" + CLUSTER_ID +ALT_BACKUP_NAME = ALT_CLUSTER_NAME + "/backups/" + BACKUP_ID + + +def _make_timestamp(): + return datetime.datetime.utcnow().replace(tzinfo=UTC) + + +def _make_table_admin_client(): + from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient + + return mock.create_autospec(BigtableTableAdminClient, instance=True) + + +def _make_backup(*args, **kwargs): + from google.cloud.bigtable.backup import Backup + + return Backup(*args, **kwargs) + + +def test_backup_constructor_defaults(): + instance = _Instance(INSTANCE_NAME) + backup = _make_backup(BACKUP_ID, instance) + + assert backup.backup_id == BACKUP_ID + assert backup._instance is instance + assert backup._cluster is None + assert backup.table_id is None + assert backup._expire_time is None + + assert backup._parent is None + assert backup._source_table is None + assert backup._start_time is None + assert backup._end_time is None + assert backup._size_bytes is None + assert backup._state is None + assert backup._encryption_info is None + + +def test_backup_constructor_explicit(): + instance = _Instance(INSTANCE_NAME) + expire_time = _make_timestamp() + + backup = _make_backup( + BACKUP_ID, + instance, + cluster_id=CLUSTER_ID, + table_id=TABLE_ID, + expire_time=expire_time, + encryption_info="encryption_info", + ) + + assert backup.backup_id == BACKUP_ID + assert backup._instance is instance + assert backup._cluster is CLUSTER_ID + assert backup.table_id == TABLE_ID + assert backup._expire_time == expire_time + assert backup._encryption_info == "encryption_info" + + assert backup._parent is None + assert backup._source_table is None + assert backup._start_time is None + assert backup._end_time is None + assert backup._size_bytes is None + assert backup._state is None + + +def test_backup_from_pb_w_project_mismatch(): + from google.cloud.bigtable_admin_v2.types import table + from google.cloud.bigtable.backup import Backup + + alt_project_id = "alt-project-id" + client = _Client(project=alt_project_id) + instance = _Instance(INSTANCE_NAME, client) + backup_pb = table.Backup(name=BACKUP_NAME) + + with pytest.raises(ValueError): + Backup.from_pb(backup_pb, instance) + + +def test_backup_from_pb_w_instance_mismatch(): + 
from google.cloud.bigtable_admin_v2.types import table + from google.cloud.bigtable.backup import Backup + + alt_instance = "/projects/%s/instances/alt-instance" % PROJECT_ID + client = _Client() + instance = _Instance(alt_instance, client) + backup_pb = table.Backup(name=BACKUP_NAME) + + with pytest.raises(ValueError): + Backup.from_pb(backup_pb, instance) + + +def test_backup_from_pb_w_bad_name(): + from google.cloud.bigtable_admin_v2.types import table + from google.cloud.bigtable.backup import Backup + + client = _Client() + instance = _Instance(INSTANCE_NAME, client) + backup_pb = table.Backup(name="invalid_name") + + with pytest.raises(ValueError): + Backup.from_pb(backup_pb, instance) + + +def test_backup_from_pb_success(): + from google.cloud.bigtable.encryption_info import EncryptionInfo + from google.cloud.bigtable.error import Status + from google.cloud.bigtable_admin_v2.types import table + from google.cloud.bigtable.backup import Backup + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.rpc.code_pb2 import Code + + client = _Client() + instance = _Instance(INSTANCE_NAME, client) + timestamp = _datetime_to_pb_timestamp(_make_timestamp()) + size_bytes = 1234 + state = table.Backup.State.READY + GOOGLE_DEFAULT_ENCRYPTION = ( + table.EncryptionInfo.EncryptionType.GOOGLE_DEFAULT_ENCRYPTION + ) + backup_pb = table.Backup( + name=BACKUP_NAME, + source_table=TABLE_NAME, + expire_time=timestamp, + start_time=timestamp, + end_time=timestamp, + size_bytes=size_bytes, + state=state, + encryption_info=table.EncryptionInfo( + encryption_type=GOOGLE_DEFAULT_ENCRYPTION, + encryption_status=_StatusPB(Code.OK, "Status OK"), + kms_key_version="2", + ), + ) + + backup = Backup.from_pb(backup_pb, instance) + + assert isinstance(backup, Backup) + assert backup._instance == instance + assert backup.backup_id == BACKUP_ID + assert backup.cluster == CLUSTER_ID + assert backup.table_id == TABLE_ID + assert backup._expire_time == timestamp + assert backup.start_time == timestamp + assert backup.end_time == timestamp + assert backup._size_bytes == size_bytes + assert backup._state == state + expected_info = EncryptionInfo( + encryption_type=GOOGLE_DEFAULT_ENCRYPTION, + encryption_status=Status(_StatusPB(Code.OK, "Status OK")), + kms_key_version="2", + ) + assert backup.encryption_info == expected_info + + +def test_backup_name(): + from google.cloud.bigtable.client import Client + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + + api = mock.create_autospec(BigtableInstanceAdminClient) + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + client._table_admin_client = api + instance = _Instance(INSTANCE_NAME, client) + + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + assert backup.name == BACKUP_NAME + + +def test_backup_cluster(): + backup = _make_backup(BACKUP_ID, _Instance(INSTANCE_NAME), cluster_id=CLUSTER_ID) + assert backup.cluster == CLUSTER_ID + + +def test_backup_cluster_setter(): + backup = _make_backup(BACKUP_ID, _Instance(INSTANCE_NAME)) + backup.cluster = CLUSTER_ID + assert backup.cluster == CLUSTER_ID + + +def test_backup_parent_none(): + backup = _make_backup(BACKUP_ID, _Instance(INSTANCE_NAME),) + assert backup.parent is None + + +def test_backup_parent_w_cluster(): + from google.cloud.bigtable.client import Client + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + 
BigtableInstanceAdminClient, + ) + + api = mock.create_autospec(BigtableInstanceAdminClient) + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + client._table_admin_client = api + instance = _Instance(INSTANCE_NAME, client) + + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + assert backup._cluster == CLUSTER_ID + assert backup.parent == CLUSTER_NAME + + +def test_backup_source_table_none(): + from google.cloud.bigtable.client import Client + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + + api = mock.create_autospec(BigtableInstanceAdminClient) + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + client._table_admin_client = api + instance = _Instance(INSTANCE_NAME, client) + + backup = _make_backup(BACKUP_ID, instance) + assert backup.source_table is None + + +def test_backup_source_table_valid(): + from google.cloud.bigtable.client import Client + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + + api = mock.create_autospec(BigtableInstanceAdminClient) + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + client._table_admin_client = api + instance = _Instance(INSTANCE_NAME, client) + + backup = _make_backup(BACKUP_ID, instance, table_id=TABLE_ID) + assert backup.source_table == TABLE_NAME + + +def test_backup_expire_time(): + instance = _Instance(INSTANCE_NAME) + expire_time = _make_timestamp() + backup = _make_backup(BACKUP_ID, instance, expire_time=expire_time) + assert backup.expire_time == expire_time + + +def test_backup_expire_time_setter(): + instance = _Instance(INSTANCE_NAME) + expire_time = _make_timestamp() + backup = _make_backup(BACKUP_ID, instance) + backup.expire_time = expire_time + assert backup.expire_time == expire_time + + +def test_backup_start_time(): + instance = _Instance(INSTANCE_NAME) + backup = _make_backup(BACKUP_ID, instance) + expected = backup._start_time = _make_timestamp() + assert backup.start_time == expected + + +def test_backup_end_time(): + instance = _Instance(INSTANCE_NAME) + backup = _make_backup(BACKUP_ID, instance) + expected = backup._end_time = _make_timestamp() + assert backup.end_time == expected + + +def test_backup_size(): + instance = _Instance(INSTANCE_NAME) + backup = _make_backup(BACKUP_ID, instance) + expected = backup._size_bytes = 10 + assert backup.size_bytes == expected + + +def test_backup_state(): + from google.cloud.bigtable_admin_v2.types import table + + instance = _Instance(INSTANCE_NAME) + backup = _make_backup(BACKUP_ID, instance) + expected = backup._state = table.Backup.State.READY + assert backup.state == expected + + +def test_backup___eq__(): + instance = object() + backup1 = _make_backup(BACKUP_ID, instance) + backup2 = _make_backup(BACKUP_ID, instance) + assert backup1 == backup2 + + +def test_backup___eq___w_different_types(): + instance = object() + backup1 = _make_backup(BACKUP_ID, instance) + backup2 = object() + assert not (backup1 == backup2) + + +def test_backup___ne___w_same_value(): + instance = object() + backup1 = _make_backup(BACKUP_ID, instance) + backup2 = _make_backup(BACKUP_ID, instance) + assert not (backup1 != backup2) + + +def test_backup___ne__(): + backup1 = _make_backup("backup_1", "instance1") + backup2 = _make_backup("backup_2", "instance2") + assert backup1 != 
backup2 + + +def test_backup_create_w_grpc_error(): + from google.api_core.exceptions import GoogleAPICallError + from google.api_core.exceptions import Unknown + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.create_backup.side_effect = Unknown("testing") + + timestamp = _make_timestamp() + backup = _make_backup( + BACKUP_ID, + _Instance(INSTANCE_NAME, client=client), + table_id=TABLE_ID, + expire_time=timestamp, + ) + + backup_pb = table.Backup( + source_table=TABLE_NAME, expire_time=_datetime_to_pb_timestamp(timestamp), + ) + + with pytest.raises(GoogleAPICallError): + backup.create(CLUSTER_ID) + + api.create_backup.assert_called_once_with( + request={"parent": CLUSTER_NAME, "backup_id": BACKUP_ID, "backup": backup_pb} + ) + + +def test_backup_create_w_already_exists(): + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + from google.cloud.exceptions import Conflict + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.create_backup.side_effect = Conflict("testing") + + timestamp = _make_timestamp() + backup = _make_backup( + BACKUP_ID, + _Instance(INSTANCE_NAME, client=client), + table_id=TABLE_ID, + expire_time=timestamp, + ) + + backup_pb = table.Backup( + source_table=TABLE_NAME, expire_time=_datetime_to_pb_timestamp(timestamp), + ) + + with pytest.raises(Conflict): + backup.create(CLUSTER_ID) + + api.create_backup.assert_called_once_with( + request={"parent": CLUSTER_NAME, "backup_id": BACKUP_ID, "backup": backup_pb} + ) + + +def test_backup_create_w_instance_not_found(): + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + from google.cloud.exceptions import NotFound + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.create_backup.side_effect = NotFound("testing") + + timestamp = _make_timestamp() + backup = _make_backup( + BACKUP_ID, + _Instance(INSTANCE_NAME, client=client), + table_id=TABLE_ID, + expire_time=timestamp, + ) + + backup_pb = table.Backup( + source_table=TABLE_NAME, expire_time=_datetime_to_pb_timestamp(timestamp), + ) + + with pytest.raises(NotFound): + backup.create(CLUSTER_ID) + + api.create_backup.assert_called_once_with( + request={"parent": CLUSTER_NAME, "backup_id": BACKUP_ID, "backup": backup_pb} + ) + + +def test_backup_create_w_cluster_not_set(): + backup = _make_backup( + BACKUP_ID, + _Instance(INSTANCE_NAME), + table_id=TABLE_ID, + expire_time=_make_timestamp(), + ) + + with pytest.raises(ValueError): + backup.create() + + +def test_backup_create_w_table_not_set(): + backup = _make_backup( + BACKUP_ID, _Instance(INSTANCE_NAME), expire_time=_make_timestamp(), + ) + + with pytest.raises(ValueError): + backup.create(CLUSTER_ID) + + +def test_backup_create_w_expire_time_not_set(): + backup = _make_backup(BACKUP_ID, _Instance(INSTANCE_NAME), table_id=TABLE_ID,) + + with pytest.raises(ValueError): + backup.create(CLUSTER_ID) + + +def test_backup_create_success(): + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + from google.cloud.bigtable import Client + + op_future = object() + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + api = client._table_admin_client 
= _make_table_admin_client() + api.create_backup.return_value = op_future + + timestamp = _make_timestamp() + backup = _make_backup( + BACKUP_ID, + _Instance(INSTANCE_NAME, client=client), + table_id=TABLE_ID, + expire_time=timestamp, + ) -class TestBackup(unittest.TestCase): - PROJECT_ID = "project-id" - INSTANCE_ID = "instance-id" - INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID - CLUSTER_ID = "cluster-id" - CLUSTER_NAME = INSTANCE_NAME + "/clusters/" + CLUSTER_ID - TABLE_ID = "table-id" - TABLE_NAME = INSTANCE_NAME + "/tables/" + TABLE_ID - BACKUP_ID = "backup-id" - BACKUP_NAME = CLUSTER_NAME + "/backups/" + BACKUP_ID - - ALT_INSTANCE = "other-instance-id" - ALT_INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + ALT_INSTANCE - ALT_CLUSTER_NAME = ALT_INSTANCE_NAME + "/clusters/" + CLUSTER_ID - ALT_BACKUP_NAME = ALT_CLUSTER_NAME + "/backups/" + BACKUP_ID - - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.backup import Backup - - return Backup - - @staticmethod - def _make_table_admin_client(): - from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient - - return mock.create_autospec(BigtableTableAdminClient, instance=True) - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def _make_timestamp(self): - return datetime.datetime.utcnow().replace(tzinfo=UTC) - - def test_constructor_defaults(self): - instance = _Instance(self.INSTANCE_NAME) - backup = self._make_one(self.BACKUP_ID, instance) - - self.assertEqual(backup.backup_id, self.BACKUP_ID) - self.assertIs(backup._instance, instance) - self.assertIsNone(backup._cluster) - self.assertIsNone(backup.table_id) - self.assertIsNone(backup._expire_time) - - self.assertIsNone(backup._parent) - self.assertIsNone(backup._source_table) - self.assertIsNone(backup._start_time) - self.assertIsNone(backup._end_time) - self.assertIsNone(backup._size_bytes) - self.assertIsNone(backup._state) - self.assertIsNone(backup._encryption_info) - - def test_constructor_non_defaults(self): - instance = _Instance(self.INSTANCE_NAME) - expire_time = self._make_timestamp() - - backup = self._make_one( - self.BACKUP_ID, - instance, - cluster_id=self.CLUSTER_ID, - table_id=self.TABLE_ID, - expire_time=expire_time, - encryption_info="encryption_info", - ) - - self.assertEqual(backup.backup_id, self.BACKUP_ID) - self.assertIs(backup._instance, instance) - self.assertIs(backup._cluster, self.CLUSTER_ID) - self.assertEqual(backup.table_id, self.TABLE_ID) - self.assertEqual(backup._expire_time, expire_time) - self.assertEqual(backup._encryption_info, "encryption_info") - - self.assertIsNone(backup._parent) - self.assertIsNone(backup._source_table) - self.assertIsNone(backup._start_time) - self.assertIsNone(backup._end_time) - self.assertIsNone(backup._size_bytes) - self.assertIsNone(backup._state) - - def test_from_pb_project_mismatch(self): - from google.cloud.bigtable_admin_v2.types import table - - alt_project_id = "alt-project-id" - client = _Client(project=alt_project_id) - instance = _Instance(self.INSTANCE_NAME, client) - backup_pb = table.Backup(name=self.BACKUP_NAME) - klasse = self._get_target_class() - - with self.assertRaises(ValueError): - klasse.from_pb(backup_pb, instance) - - def test_from_pb_instance_mismatch(self): - from google.cloud.bigtable_admin_v2.types import table - - alt_instance = "/projects/%s/instances/alt-instance" % self.PROJECT_ID - client = _Client() - instance = _Instance(alt_instance, client) - backup_pb = 
table.Backup(name=self.BACKUP_NAME) - klasse = self._get_target_class() - - with self.assertRaises(ValueError): - klasse.from_pb(backup_pb, instance) - - def test_from_pb_bad_name(self): - from google.cloud.bigtable_admin_v2.types import table - - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client) - backup_pb = table.Backup(name="invalid_name") - klasse = self._get_target_class() - - with self.assertRaises(ValueError): - klasse.from_pb(backup_pb, instance) - - def test_from_pb_success(self): - from google.cloud.bigtable.encryption_info import EncryptionInfo - from google.cloud.bigtable.error import Status - from google.cloud.bigtable_admin_v2.types import table - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.rpc.code_pb2 import Code - - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client) - timestamp = _datetime_to_pb_timestamp(self._make_timestamp()) - size_bytes = 1234 - state = table.Backup.State.READY - GOOGLE_DEFAULT_ENCRYPTION = ( - table.EncryptionInfo.EncryptionType.GOOGLE_DEFAULT_ENCRYPTION - ) - backup_pb = table.Backup( - name=self.BACKUP_NAME, - source_table=self.TABLE_NAME, - expire_time=timestamp, - start_time=timestamp, - end_time=timestamp, - size_bytes=size_bytes, - state=state, - encryption_info=table.EncryptionInfo( - encryption_type=GOOGLE_DEFAULT_ENCRYPTION, - encryption_status=_StatusPB(Code.OK, "Status OK"), - kms_key_version="2", - ), - ) - klasse = self._get_target_class() - - backup = klasse.from_pb(backup_pb, instance) - - self.assertTrue(isinstance(backup, klasse)) - self.assertEqual(backup._instance, instance) - self.assertEqual(backup.backup_id, self.BACKUP_ID) - self.assertEqual(backup.cluster, self.CLUSTER_ID) - self.assertEqual(backup.table_id, self.TABLE_ID) - self.assertEqual(backup._expire_time, timestamp) - self.assertEqual(backup.start_time, timestamp) - self.assertEqual(backup.end_time, timestamp) - self.assertEqual(backup._size_bytes, size_bytes) - self.assertEqual(backup._state, state) - self.assertEqual( - backup.encryption_info, - EncryptionInfo( - encryption_type=GOOGLE_DEFAULT_ENCRYPTION, - encryption_status=Status(_StatusPB(Code.OK, "Status OK")), - kms_key_version="2", - ), - ) - - def test_property_name(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - client._table_admin_client = api - instance = _Instance(self.INSTANCE_NAME, client) - - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - self.assertEqual(backup.name, self.BACKUP_NAME) - - def test_property_cluster(self): - backup = self._make_one( - self.BACKUP_ID, _Instance(self.INSTANCE_NAME), cluster_id=self.CLUSTER_ID - ) - self.assertEqual(backup.cluster, self.CLUSTER_ID) - - def test_property_cluster_setter(self): - backup = self._make_one(self.BACKUP_ID, _Instance(self.INSTANCE_NAME)) - backup.cluster = self.CLUSTER_ID - self.assertEqual(backup.cluster, self.CLUSTER_ID) - - def test_property_parent_none(self): - backup = self._make_one(self.BACKUP_ID, _Instance(self.INSTANCE_NAME),) - self.assertIsNone(backup.parent) - - def test_property_parent_w_cluster(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - 
BigtableInstanceAdminClient, - ) - - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - client._table_admin_client = api - instance = _Instance(self.INSTANCE_NAME, client) - - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - self.assertEqual(backup._cluster, self.CLUSTER_ID) - self.assertEqual(backup.parent, self.CLUSTER_NAME) - - def test_property_source_table_none(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - client._table_admin_client = api - instance = _Instance(self.INSTANCE_NAME, client) - - backup = self._make_one(self.BACKUP_ID, instance) - self.assertIsNone(backup.source_table) - - def test_property_source_table_valid(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - client._table_admin_client = api - instance = _Instance(self.INSTANCE_NAME, client) - - backup = self._make_one(self.BACKUP_ID, instance, table_id=self.TABLE_ID) - self.assertEqual(backup.source_table, self.TABLE_NAME) - - def test_property_expire_time(self): - instance = _Instance(self.INSTANCE_NAME) - expire_time = self._make_timestamp() - backup = self._make_one(self.BACKUP_ID, instance, expire_time=expire_time) - self.assertEqual(backup.expire_time, expire_time) - - def test_property_expire_time_setter(self): - instance = _Instance(self.INSTANCE_NAME) - expire_time = self._make_timestamp() - backup = self._make_one(self.BACKUP_ID, instance) - backup.expire_time = expire_time - self.assertEqual(backup.expire_time, expire_time) - - def test_property_start_time(self): - instance = _Instance(self.INSTANCE_NAME) - backup = self._make_one(self.BACKUP_ID, instance) - expected = backup._start_time = self._make_timestamp() - self.assertEqual(backup.start_time, expected) - - def test_property_end_time(self): - instance = _Instance(self.INSTANCE_NAME) - backup = self._make_one(self.BACKUP_ID, instance) - expected = backup._end_time = self._make_timestamp() - self.assertEqual(backup.end_time, expected) - - def test_property_size(self): - instance = _Instance(self.INSTANCE_NAME) - backup = self._make_one(self.BACKUP_ID, instance) - expected = backup._size_bytes = 10 - self.assertEqual(backup.size_bytes, expected) - - def test_property_state(self): - from google.cloud.bigtable_admin_v2.types import table - - instance = _Instance(self.INSTANCE_NAME) - backup = self._make_one(self.BACKUP_ID, instance) - expected = backup._state = table.Backup.State.READY - self.assertEqual(backup.state, expected) - - def test___eq__(self): - instance = object() - backup1 = self._make_one(self.BACKUP_ID, instance) - backup2 = self._make_one(self.BACKUP_ID, instance) - self.assertTrue(backup1 == backup2) - - def test___eq__different_types(self): - instance = object() - backup1 = self._make_one(self.BACKUP_ID, instance) - backup2 = object() - self.assertFalse(backup1 == backup2) - - def 
test___ne__same_value(self): - instance = object() - backup1 = self._make_one(self.BACKUP_ID, instance) - backup2 = self._make_one(self.BACKUP_ID, instance) - self.assertFalse(backup1 != backup2) - - def test___ne__(self): - backup1 = self._make_one("backup_1", "instance1") - backup2 = self._make_one("backup_2", "instance2") - self.assertTrue(backup1 != backup2) - - def test_create_grpc_error(self): - from google.api_core.exceptions import GoogleAPICallError - from google.api_core.exceptions import Unknown - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.create_backup.side_effect = Unknown("testing") - - timestamp = self._make_timestamp() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - table_id=self.TABLE_ID, - expire_time=timestamp, - ) - - backup_pb = table.Backup( - source_table=self.TABLE_NAME, - expire_time=_datetime_to_pb_timestamp(timestamp), - ) - - with self.assertRaises(GoogleAPICallError): - backup.create(self.CLUSTER_ID) - - api.create_backup.assert_called_once_with( - request={ - "parent": self.CLUSTER_NAME, - "backup_id": self.BACKUP_ID, - "backup": backup_pb, - } - ) - - def test_create_already_exists(self): - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - from google.cloud.exceptions import Conflict - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.create_backup.side_effect = Conflict("testing") - - timestamp = self._make_timestamp() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - table_id=self.TABLE_ID, - expire_time=timestamp, - ) - - backup_pb = table.Backup( - source_table=self.TABLE_NAME, - expire_time=_datetime_to_pb_timestamp(timestamp), - ) - - with self.assertRaises(Conflict): - backup.create(self.CLUSTER_ID) - - api.create_backup.assert_called_once_with( - request={ - "parent": self.CLUSTER_NAME, - "backup_id": self.BACKUP_ID, - "backup": backup_pb, - } - ) - - def test_create_instance_not_found(self): - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - from google.cloud.exceptions import NotFound - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.create_backup.side_effect = NotFound("testing") - - timestamp = self._make_timestamp() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - table_id=self.TABLE_ID, - expire_time=timestamp, - ) - - backup_pb = table.Backup( - source_table=self.TABLE_NAME, - expire_time=_datetime_to_pb_timestamp(timestamp), - ) - - with self.assertRaises(NotFound): - backup.create(self.CLUSTER_ID) - - api.create_backup.assert_called_once_with( - request={ - "parent": self.CLUSTER_NAME, - "backup_id": self.BACKUP_ID, - "backup": backup_pb, - } - ) - - def test_create_cluster_not_set(self): - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME), - table_id=self.TABLE_ID, - expire_time=self._make_timestamp(), - ) - - with self.assertRaises(ValueError): - backup.create() - - def test_create_table_not_set(self): - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME), - expire_time=self._make_timestamp(), - ) - - with self.assertRaises(ValueError): - 
backup.create(self.CLUSTER_ID) - - def test_create_expire_time_not_set(self): - backup = self._make_one( - self.BACKUP_ID, _Instance(self.INSTANCE_NAME), table_id=self.TABLE_ID, - ) - - with self.assertRaises(ValueError): - backup.create(self.CLUSTER_ID) - - def test_create_success(self): - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - from google.cloud.bigtable import Client - - op_future = object() - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - api = client._table_admin_client = self._make_table_admin_client() - api.create_backup.return_value = op_future - - timestamp = self._make_timestamp() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - table_id=self.TABLE_ID, - expire_time=timestamp, - ) - - backup_pb = table.Backup( - source_table=self.TABLE_NAME, - expire_time=_datetime_to_pb_timestamp(timestamp), - ) - - future = backup.create(self.CLUSTER_ID) - self.assertEqual(backup._cluster, self.CLUSTER_ID) - self.assertIs(future, op_future) - - api.create_backup.assert_called_once_with( - request={ - "parent": self.CLUSTER_NAME, - "backup_id": self.BACKUP_ID, - "backup": backup_pb, - } - ) - - def test_exists_grpc_error(self): - from google.api_core.exceptions import Unknown - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.get_backup.side_effect = Unknown("testing") - - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - - with self.assertRaises(Unknown): - backup.exists() - - request = {"name": self.BACKUP_NAME} - api.get_backup.assert_called_once_with(request) - - def test_exists_not_found(self): - from google.api_core.exceptions import NotFound - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.get_backup.side_effect = NotFound("testing") - - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - - self.assertFalse(backup.exists()) - - api.get_backup.assert_called_once_with(request={"name": self.BACKUP_NAME}) - - def test_get(self): - from google.cloud.bigtable_admin_v2.types import table - from google.cloud._helpers import _datetime_to_pb_timestamp - - timestamp = _datetime_to_pb_timestamp(self._make_timestamp()) - state = table.Backup.State.READY - - client = _Client() - backup_pb = table.Backup( - name=self.BACKUP_NAME, - source_table=self.TABLE_NAME, - expire_time=timestamp, - start_time=timestamp, - end_time=timestamp, - size_bytes=0, - state=state, - ) - api = client.table_admin_client = self._make_table_admin_client() - api.get_backup.return_value = backup_pb - - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - - self.assertEqual(backup.get(), backup_pb) - - def test_reload(self): - from google.cloud.bigtable_admin_v2.types import table - from google.cloud._helpers import _datetime_to_pb_timestamp - - timestamp = _datetime_to_pb_timestamp(self._make_timestamp()) - state = table.Backup.State.READY - - client = _Client() - backup_pb = table.Backup( - name=self.BACKUP_NAME, - source_table=self.TABLE_NAME, - expire_time=timestamp, - start_time=timestamp, - end_time=timestamp, - size_bytes=0, - state=state, - ) - api = client.table_admin_client = 
self._make_table_admin_client() - api.get_backup.return_value = backup_pb - - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - - backup.reload() - self.assertEqual(backup._source_table, self.TABLE_NAME) - self.assertEqual(backup._expire_time, timestamp) - self.assertEqual(backup._start_time, timestamp) - self.assertEqual(backup._end_time, timestamp) - self.assertEqual(backup._size_bytes, 0) - self.assertEqual(backup._state, state) + backup_pb = table.Backup( + source_table=TABLE_NAME, expire_time=_datetime_to_pb_timestamp(timestamp), + ) - def test_exists_success(self): - from google.cloud.bigtable_admin_v2.types import table + future = backup.create(CLUSTER_ID) + assert backup._cluster == CLUSTER_ID + assert future is op_future - client = _Client() - backup_pb = table.Backup(name=self.BACKUP_NAME) - api = client.table_admin_client = self._make_table_admin_client() - api.get_backup.return_value = backup_pb + api.create_backup.assert_called_once_with( + request={"parent": CLUSTER_NAME, "backup_id": BACKUP_ID, "backup": backup_pb} + ) - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - self.assertTrue(backup.exists()) +def test_backup_get(): + from google.cloud.bigtable_admin_v2.types import table + from google.cloud._helpers import _datetime_to_pb_timestamp - api.get_backup.assert_called_once_with(request={"name": self.BACKUP_NAME}) + timestamp = _datetime_to_pb_timestamp(_make_timestamp()) + state = table.Backup.State.READY - def test_delete_grpc_error(self): - from google.api_core.exceptions import Unknown + client = _Client() + backup_pb = table.Backup( + name=BACKUP_NAME, + source_table=TABLE_NAME, + expire_time=timestamp, + start_time=timestamp, + end_time=timestamp, + size_bytes=0, + state=state, + ) + api = client.table_admin_client = _make_table_admin_client() + api.get_backup.return_value = backup_pb - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.delete_backup.side_effect = Unknown("testing") - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) - with self.assertRaises(Unknown): - backup.delete() + assert backup.get() == backup_pb - api.delete_backup.assert_called_once_with(request={"name": self.BACKUP_NAME}) - def test_delete_not_found(self): - from google.api_core.exceptions import NotFound +def test_backup_reload(): + from google.cloud.bigtable_admin_v2.types import table + from google.cloud._helpers import _datetime_to_pb_timestamp - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.delete_backup.side_effect = NotFound("testing") - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) + timestamp = _datetime_to_pb_timestamp(_make_timestamp()) + state = table.Backup.State.READY - with self.assertRaises(NotFound): - backup.delete() - - api.delete_backup.assert_called_once_with(request={"name": self.BACKUP_NAME}) - - def test_delete_success(self): - from google.protobuf.empty_pb2 import Empty - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.delete_backup.return_value = Empty() - instance 
= _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) + client = _Client() + backup_pb = table.Backup( + name=BACKUP_NAME, + source_table=TABLE_NAME, + expire_time=timestamp, + start_time=timestamp, + end_time=timestamp, + size_bytes=0, + state=state, + ) + api = client.table_admin_client = _make_table_admin_client() + api.get_backup.return_value = backup_pb + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + backup.reload() + assert backup._source_table == TABLE_NAME + assert backup._expire_time == timestamp + assert backup._start_time == timestamp + assert backup._end_time == timestamp + assert backup._size_bytes == 0 + assert backup._state == state + + +def test_backup_exists_w_grpc_error(): + from google.api_core.exceptions import Unknown + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.get_backup.side_effect = Unknown("testing") + + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + with pytest.raises(Unknown): + backup.exists() + + request = {"name": BACKUP_NAME} + api.get_backup.assert_called_once_with(request) + + +def test_backup_exists_w_not_found(): + from google.api_core.exceptions import NotFound + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.get_backup.side_effect = NotFound("testing") + + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + assert not backup.exists() + + api.get_backup.assert_called_once_with(request={"name": BACKUP_NAME}) + + +def test_backup_exists_success(): + from google.cloud.bigtable_admin_v2.types import table + + client = _Client() + backup_pb = table.Backup(name=BACKUP_NAME) + api = client.table_admin_client = _make_table_admin_client() + api.get_backup.return_value = backup_pb + + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + assert backup.exists() + + api.get_backup.assert_called_once_with(request={"name": BACKUP_NAME}) + + +def test_backup_delete_w_grpc_error(): + from google.api_core.exceptions import Unknown + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.delete_backup.side_effect = Unknown("testing") + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + with pytest.raises(Unknown): backup.delete() - api.delete_backup.assert_called_once_with(request={"name": self.BACKUP_NAME}) - - def test_update_expire_time_grpc_error(self): - from google.api_core.exceptions import Unknown - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - from google.protobuf import field_mask_pb2 - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.update_backup.side_effect = Unknown("testing") - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - expire_time = self._make_timestamp() - - with self.assertRaises(Unknown): - backup.update_expire_time(expire_time) - - backup_update = table.Backup( - name=self.BACKUP_NAME, expire_time=_datetime_to_pb_timestamp(expire_time), - ) - update_mask = 
field_mask_pb2.FieldMask(paths=["expire_time"]) - api.update_backup.assert_called_once_with( - request={"backup": backup_update, "update_mask": update_mask} - ) - - def test_update_expire_time_not_found(self): - from google.api_core.exceptions import NotFound - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - from google.protobuf import field_mask_pb2 - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.update_backup.side_effect = NotFound("testing") - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - expire_time = self._make_timestamp() - - with self.assertRaises(NotFound): - backup.update_expire_time(expire_time) - - backup_update = table.Backup( - name=self.BACKUP_NAME, expire_time=_datetime_to_pb_timestamp(expire_time), - ) - update_mask = field_mask_pb2.FieldMask(paths=["expire_time"]) - api.update_backup.assert_called_once_with( - request={"backup": backup_update, "update_mask": update_mask} - ) - - def test_update_expire_time_success(self): - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table - from google.protobuf import field_mask_pb2 - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.update_backup.return_type = table.Backup(name=self.BACKUP_NAME) - instance = _Instance(self.INSTANCE_NAME, client=client) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - expire_time = self._make_timestamp() + api.delete_backup.assert_called_once_with(request={"name": BACKUP_NAME}) + + +def test_backup_delete_w_not_found(): + from google.api_core.exceptions import NotFound + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.delete_backup.side_effect = NotFound("testing") + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + with pytest.raises(NotFound): + backup.delete() + + api.delete_backup.assert_called_once_with(request={"name": BACKUP_NAME}) + + +def test_backup_delete_success(): + from google.protobuf.empty_pb2 import Empty + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.delete_backup.return_value = Empty() + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + backup.delete() + + api.delete_backup.assert_called_once_with(request={"name": BACKUP_NAME}) + + +def test_backup_update_expire_time_w_grpc_error(): + from google.api_core.exceptions import Unknown + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + from google.protobuf import field_mask_pb2 + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.update_backup.side_effect = Unknown("testing") + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + expire_time = _make_timestamp() + + with pytest.raises(Unknown): + backup.update_expire_time(expire_time) + + backup_update = table.Backup( + name=BACKUP_NAME, expire_time=_datetime_to_pb_timestamp(expire_time), + ) + update_mask = field_mask_pb2.FieldMask(paths=["expire_time"]) + api.update_backup.assert_called_once_with( + request={"backup": backup_update, 
"update_mask": update_mask} + ) + + +def test_backup_update_expire_time_w_not_found(): + from google.api_core.exceptions import NotFound + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + from google.protobuf import field_mask_pb2 + + client = _Client() + api = client.table_admin_client = _make_table_admin_client() + api.update_backup.side_effect = NotFound("testing") + instance = _Instance(INSTANCE_NAME, client=client) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + expire_time = _make_timestamp() + + with pytest.raises(NotFound): backup.update_expire_time(expire_time) - backup_update = table.Backup( - name=self.BACKUP_NAME, expire_time=_datetime_to_pb_timestamp(expire_time), - ) - update_mask = field_mask_pb2.FieldMask(paths=["expire_time"]) - api.update_backup.assert_called_once_with( - request={"backup": backup_update, "update_mask": update_mask} - ) - - def test_restore_grpc_error(self): - from google.api_core.exceptions import GoogleAPICallError - from google.api_core.exceptions import Unknown - - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.restore_table.side_effect = Unknown("testing") - - timestamp = self._make_timestamp() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - cluster_id=self.CLUSTER_ID, - table_id=self.TABLE_NAME, - expire_time=timestamp, - ) - - with self.assertRaises(GoogleAPICallError): - backup.restore(self.TABLE_ID) - - api.restore_table.assert_called_once_with( - request={ - "parent": self.INSTANCE_NAME, - "table_id": self.TABLE_ID, - "backup": self.BACKUP_NAME, - } - ) - - def test_restore_cluster_not_set(self): - client = _Client() - client.table_admin_client = self._make_table_admin_client() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - table_id=self.TABLE_ID, - expire_time=self._make_timestamp(), - ) - - with self.assertRaises(ValueError): - backup.restore(self.TABLE_ID) - - def _restore_helper(self, instance_id=None, instance_name=None): - op_future = object() - client = _Client() - api = client.table_admin_client = self._make_table_admin_client() - api.restore_table.return_value = op_future - - timestamp = self._make_timestamp() - backup = self._make_one( - self.BACKUP_ID, - _Instance(self.INSTANCE_NAME, client=client), - cluster_id=self.CLUSTER_ID, - table_id=self.TABLE_NAME, - expire_time=timestamp, - ) - - future = backup.restore(self.TABLE_ID, instance_id) - self.assertEqual(backup._cluster, self.CLUSTER_ID) - self.assertIs(future, op_future) - - api.restore_table.assert_called_once_with( - request={ - "parent": instance_name or self.INSTANCE_NAME, - "table_id": self.TABLE_ID, - "backup": self.BACKUP_NAME, - } - ) - api.restore_table.reset_mock() - - def test_restore_default(self): - self._restore_helper() - - def test_restore_to_another_instance(self): - self._restore_helper(self.ALT_INSTANCE, self.ALT_INSTANCE_NAME) - - def test_get_iam_policy(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - - instance = client.instance(instance_id=self.INSTANCE_ID) - backup = 
self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - - version = 1 - etag = b"etag_v1" - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}] - iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) - - table_api = mock.create_autospec(BigtableTableAdminClient) - client._table_admin_client = table_api - table_api.get_iam_policy.return_value = iam_policy - - result = backup.get_iam_policy() - - table_api.get_iam_policy.assert_called_once_with( - request={"resource": backup.name} - ) - self.assertEqual(result.version, version) - self.assertEqual(result.etag, etag) - - admins = result.bigtable_admins - self.assertEqual(len(admins), len(members)) - for found, expected in zip(sorted(admins), sorted(members)): - self.assertEqual(found, expected) - - def test_set_iam_policy(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import Policy - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - - instance = client.instance(instance_id=self.INSTANCE_ID) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) - - version = 1 - etag = b"etag_v1" - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": sorted(members)}] - iam_policy_pb = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) - - table_api = mock.create_autospec(BigtableTableAdminClient) - client._table_admin_client = table_api - table_api.set_iam_policy.return_value = iam_policy_pb - - iam_policy = Policy(etag=etag, version=version) - iam_policy[BIGTABLE_ADMIN_ROLE] = [ - Policy.user("user1@test.com"), - Policy.service_account("service_acc1@test.com"), - ] - - result = backup.set_iam_policy(iam_policy) - - table_api.set_iam_policy.assert_called_once_with( - request={"resource": backup.name, "policy": iam_policy_pb} - ) - self.assertEqual(result.version, version) - self.assertEqual(result.etag, etag) - - admins = result.bigtable_admins - self.assertEqual(len(admins), len(members)) - for found, expected in zip(sorted(admins), sorted(members)): - self.assertEqual(found, expected) - - def test_test_iam_permissions(self): - from google.cloud.bigtable.client import Client - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - from google.iam.v1 import iam_policy_pb2 - - credentials = _make_credentials() - client = Client(project=self.PROJECT_ID, credentials=credentials, admin=True) - - instance = client.instance(instance_id=self.INSTANCE_ID) - backup = self._make_one(self.BACKUP_ID, instance, cluster_id=self.CLUSTER_ID) + backup_update = table.Backup( + name=BACKUP_NAME, expire_time=_datetime_to_pb_timestamp(expire_time), + ) + update_mask = field_mask_pb2.FieldMask(paths=["expire_time"]) + api.update_backup.assert_called_once_with( + request={"backup": backup_update, "update_mask": update_mask} + ) + + +def test_backup_update_expire_time_success(): + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + from google.protobuf import field_mask_pb2 + + client = _Client() + api = 
client.table_admin_client = _make_table_admin_client()
+    api.update_backup.return_value = table.Backup(name=BACKUP_NAME)
+    instance = _Instance(INSTANCE_NAME, client=client)
+    backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID)
+    expire_time = _make_timestamp()
+
+    backup.update_expire_time(expire_time)
+
+    backup_update = table.Backup(
+        name=BACKUP_NAME, expire_time=_datetime_to_pb_timestamp(expire_time),
+    )
+    update_mask = field_mask_pb2.FieldMask(paths=["expire_time"])
+    api.update_backup.assert_called_once_with(
+        request={"backup": backup_update, "update_mask": update_mask}
+    )
+
+
+def test_backup_restore_w_grpc_error():
+    from google.api_core.exceptions import GoogleAPICallError
+    from google.api_core.exceptions import Unknown
+
+    client = _Client()
+    api = client.table_admin_client = _make_table_admin_client()
+    api.restore_table.side_effect = Unknown("testing")
+
+    timestamp = _make_timestamp()
+    backup = _make_backup(
+        BACKUP_ID,
+        _Instance(INSTANCE_NAME, client=client),
+        cluster_id=CLUSTER_ID,
+        table_id=TABLE_NAME,
+        expire_time=timestamp,
+    )
+
+    with pytest.raises(GoogleAPICallError):
+        backup.restore(TABLE_ID)
+
+    api.restore_table.assert_called_once_with(
+        request={"parent": INSTANCE_NAME, "table_id": TABLE_ID, "backup": BACKUP_NAME}
+    )
+
+
+def test_backup_restore_w_cluster_not_set():
+    client = _Client()
+    client.table_admin_client = _make_table_admin_client()
+    backup = _make_backup(
+        BACKUP_ID,
+        _Instance(INSTANCE_NAME, client=client),
+        table_id=TABLE_ID,
+        expire_time=_make_timestamp(),
+    )
+
+    with pytest.raises(ValueError):
+        backup.restore(TABLE_ID)
+
+
+def _restore_helper(instance_id=None, instance_name=None):
+    op_future = object()
+    client = _Client()
+    api = client.table_admin_client = _make_table_admin_client()
+    api.restore_table.return_value = op_future
+
+    timestamp = _make_timestamp()
+    backup = _make_backup(
+        BACKUP_ID,
+        _Instance(INSTANCE_NAME, client=client),
+        cluster_id=CLUSTER_ID,
+        table_id=TABLE_NAME,
+        expire_time=timestamp,
+    )
+
+    future = backup.restore(TABLE_ID, instance_id)
+    assert backup._cluster == CLUSTER_ID
+    assert future is op_future
+
+    api.restore_table.assert_called_once_with(
+        request={
+            "parent": instance_name or INSTANCE_NAME,
+            "table_id": TABLE_ID,
+            "backup": BACKUP_NAME,
+        }
+    )
+    api.restore_table.reset_mock()
+
+
+def test_backup_restore_default():
+    _restore_helper()
+
+
+def test_backup_restore_to_another_instance():
+    _restore_helper(ALT_INSTANCE, ALT_INSTANCE_NAME)
+
+
+def test_backup_get_iam_policy():
+    from google.cloud.bigtable.client import Client
+    from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
+        BigtableTableAdminClient,
+    )
+    from google.iam.v1 import policy_pb2
+    from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
+
+    credentials = _make_credentials()
+    client = Client(project=PROJECT_ID, credentials=credentials, admin=True)
+
+    instance = client.instance(instance_id=INSTANCE_ID)
+    backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID)
+
+    version = 1
+    etag = b"etag_v1"
+    members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
+    bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}]
+    iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
+
+    table_api = mock.create_autospec(BigtableTableAdminClient)
+    client._table_admin_client = table_api
+    table_api.get_iam_policy.return_value = iam_policy
+
+    result = backup.get_iam_policy()
+
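+    # The Policy returned by 'get_iam_policy' surfaces the BIGTABLE_ADMIN_ROLE
+    # binding via its 'bigtable_admins' property; the assertions below check
+    # that those members round-trip from the mocked policy protobuf.
+    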
table_api.get_iam_policy.assert_called_once_with(request={"resource": backup.name}) + assert result.version == version + assert result.etag == etag + + admins = result.bigtable_admins + assert len(admins) == len(members) + for found, expected in zip(sorted(admins), sorted(members)): + assert found == expected + + +def test_backup_set_iam_policy(): + from google.cloud.bigtable.client import Client + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BigtableTableAdminClient, + ) + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import Policy + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + + instance = client.instance(instance_id=INSTANCE_ID) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + version = 1 + etag = b"etag_v1" + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": sorted(members)}] + iam_policy_pb = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) + + table_api = mock.create_autospec(BigtableTableAdminClient) + client._table_admin_client = table_api + table_api.set_iam_policy.return_value = iam_policy_pb + + iam_policy = Policy(etag=etag, version=version) + iam_policy[BIGTABLE_ADMIN_ROLE] = [ + Policy.user("user1@test.com"), + Policy.service_account("service_acc1@test.com"), + ] + + result = backup.set_iam_policy(iam_policy) + + table_api.set_iam_policy.assert_called_once_with( + request={"resource": backup.name, "policy": iam_policy_pb} + ) + assert result.version == version + assert result.etag == etag + + admins = result.bigtable_admins + assert len(admins) == len(members) + for found, expected in zip(sorted(admins), sorted(members)): + assert found == expected + + +def test_backup_test_iam_permissions(): + from google.cloud.bigtable.client import Client + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BigtableTableAdminClient, + ) + from google.iam.v1 import iam_policy_pb2 + + credentials = _make_credentials() + client = Client(project=PROJECT_ID, credentials=credentials, admin=True) + + instance = client.instance(instance_id=INSTANCE_ID) + backup = _make_backup(BACKUP_ID, instance, cluster_id=CLUSTER_ID) + + permissions = ["bigtable.backups.create", "bigtable.backups.list"] + + response = iam_policy_pb2.TestIamPermissionsResponse(permissions=permissions) - permissions = ["bigtable.backups.create", "bigtable.backups.list"] + table_api = mock.create_autospec(BigtableTableAdminClient) + table_api.test_iam_permissions.return_value = response + client._table_admin_client = table_api - response = iam_policy_pb2.TestIamPermissionsResponse(permissions=permissions) - - table_api = mock.create_autospec(BigtableTableAdminClient) - table_api.test_iam_permissions.return_value = response - client._table_admin_client = table_api - - result = backup.test_iam_permissions(permissions) + result = backup.test_iam_permissions(permissions) - self.assertEqual(result, permissions) - table_api.test_iam_permissions.assert_called_once_with( - request={"resource": backup.name, "permissions": permissions} - ) + assert result == permissions + table_api.test_iam_permissions.assert_called_once_with( + request={"resource": backup.name, "permissions": permissions} + ) class _Client(object): - def __init__(self, project=TestBackup.PROJECT_ID): + def __init__(self, project=PROJECT_ID): 
self.project = project self.project_name = "projects/" + self.project From 5870dc09f5ba0900073df4f835f52f5b49016baa Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 14:24:28 -0400 Subject: [PATCH 03/16] tests: refactor 'batcher' unit tests w/ pytest idioms --- tests/unit/test_batcher.py | 189 +++++++++++++++++-------------------- 1 file changed, 85 insertions(+), 104 deletions(-) diff --git a/tests/unit/test_batcher.py b/tests/unit/test_batcher.py index 8760c3a2d..9ae6ed175 100644 --- a/tests/unit/test_batcher.py +++ b/tests/unit/test_batcher.py @@ -13,154 +13,135 @@ # limitations under the License. -import unittest - import mock +import pytest -from ._testing import _make_credentials - -from google.cloud.bigtable.batcher import MutationsBatcher from google.cloud.bigtable.row import DirectRow +TABLE_ID = "table-id" +TABLE_NAME = "/tables/" + TABLE_ID -class TestMutationsBatcher(unittest.TestCase): - from grpc import StatusCode - TABLE_ID = "table-id" - TABLE_NAME = "/tables/" + TABLE_ID +def _make_mutation_batcher(table, **kw): + from google.cloud.bigtable.batcher import MutationsBatcher - # RPC Status Codes - SUCCESS = StatusCode.OK.value[0] + return MutationsBatcher(table, **kw) - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.table import Table - return Table +def test_mutation_batcher_constructor(): + table = _Table(TABLE_NAME) - def _make_table(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) + mutation_batcher = _make_mutation_batcher(table) + assert table is mutation_batcher.table - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client - return Client +def test_mutation_batcher_mutate_row(): + table = _Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher(table=table) - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) + rows = [ + DirectRow(row_key=b"row_key"), + DirectRow(row_key=b"row_key_2"), + DirectRow(row_key=b"row_key_3"), + DirectRow(row_key=b"row_key_4"), + ] - def test_constructor(self): - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) + mutation_batcher.mutate_rows(rows) + mutation_batcher.flush() - instance = client.instance(instance_id="instance-id") - table = self._make_table(self.TABLE_ID, instance) + assert table.mutation_calls == 1 - mutation_batcher = MutationsBatcher(table) - self.assertEqual(table, mutation_batcher.table) - def test_mutate_row(self): - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table) +def test_mutation_batcher_mutate(): + table = _Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher(table=table) - rows = [ - DirectRow(row_key=b"row_key"), - DirectRow(row_key=b"row_key_2"), - DirectRow(row_key=b"row_key_3"), - DirectRow(row_key=b"row_key_4"), - ] + row = DirectRow(row_key=b"row_key") + row.set_cell("cf1", b"c1", 1) + row.set_cell("cf1", b"c2", 2) + row.set_cell("cf1", b"c3", 3) + row.set_cell("cf1", b"c4", 4) - mutation_batcher.mutate_rows(rows) - mutation_batcher.flush() + mutation_batcher.mutate(row) - self.assertEqual(table.mutation_calls, 1) + mutation_batcher.flush() - def test_mutate_rows(self): - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table) + assert table.mutation_calls == 1 - row = DirectRow(row_key=b"row_key") - row.set_cell("cf1", b"c1", 1) - row.set_cell("cf1", b"c2", 2) - row.set_cell("cf1", b"c3", 3) - 
row.set_cell("cf1", b"c4", 4) - mutation_batcher.mutate(row) +def test_mutation_batcher_flush_w_no_rows(): + table = _Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher(table=table) + mutation_batcher.flush() - mutation_batcher.flush() + assert table.mutation_calls == 0 - self.assertEqual(table.mutation_calls, 1) - def test_flush_with_no_rows(self): - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table) - mutation_batcher.flush() +def test_mutation_batcher_mutate_w_max_flush_count(): + table = _Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher(table=table, flush_count=3) - self.assertEqual(table.mutation_calls, 0) + row_1 = DirectRow(row_key=b"row_key_1") + row_2 = DirectRow(row_key=b"row_key_2") + row_3 = DirectRow(row_key=b"row_key_3") - def test_add_row_with_max_flush_count(self): - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table, flush_count=3) + mutation_batcher.mutate(row_1) + mutation_batcher.mutate(row_2) + mutation_batcher.mutate(row_3) - row_1 = DirectRow(row_key=b"row_key_1") - row_2 = DirectRow(row_key=b"row_key_2") - row_3 = DirectRow(row_key=b"row_key_3") + assert table.mutation_calls == 1 - mutation_batcher.mutate(row_1) - mutation_batcher.mutate(row_2) - mutation_batcher.mutate(row_3) - self.assertEqual(table.mutation_calls, 1) +@mock.patch("google.cloud.bigtable.batcher.MAX_MUTATIONS", new=3) +def test_mutation_batcher_mutate_with_max_mutations_failure(): + from google.cloud.bigtable.batcher import MaxMutationsError - @mock.patch("google.cloud.bigtable.batcher.MAX_MUTATIONS", new=3) - def test_mutate_row_with_max_mutations_failure(self): - from google.cloud.bigtable.batcher import MaxMutationsError + table = _Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher(table=table) - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table) + row = DirectRow(row_key=b"row_key") + row.set_cell("cf1", b"c1", 1) + row.set_cell("cf1", b"c2", 2) + row.set_cell("cf1", b"c3", 3) + row.set_cell("cf1", b"c4", 4) - row = DirectRow(row_key=b"row_key") - row.set_cell("cf1", b"c1", 1) - row.set_cell("cf1", b"c2", 2) - row.set_cell("cf1", b"c3", 3) - row.set_cell("cf1", b"c4", 4) + with pytest.raises(MaxMutationsError): + mutation_batcher.mutate(row) - with self.assertRaises(MaxMutationsError): - mutation_batcher.mutate(row) - @mock.patch("google.cloud.bigtable.batcher.MAX_MUTATIONS", new=3) - def test_mutate_row_with_max_mutations(self): - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table) +@mock.patch("google.cloud.bigtable.batcher.MAX_MUTATIONS", new=3) +def test_mutation_batcher_mutate_w_max_mutations(): + table = _Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher(table=table) - row = DirectRow(row_key=b"row_key") - row.set_cell("cf1", b"c1", 1) - row.set_cell("cf1", b"c2", 2) - row.set_cell("cf1", b"c3", 3) + row = DirectRow(row_key=b"row_key") + row.set_cell("cf1", b"c1", 1) + row.set_cell("cf1", b"c2", 2) + row.set_cell("cf1", b"c3", 3) - mutation_batcher.mutate(row) - mutation_batcher.flush() + mutation_batcher.mutate(row) + mutation_batcher.flush() - self.assertEqual(table.mutation_calls, 1) + assert table.mutation_calls == 1 - def test_mutate_row_with_max_row_bytes(self): - table = _Table(self.TABLE_NAME) - mutation_batcher = MutationsBatcher(table=table, max_row_bytes=3 * 1024 * 1024) - number_of_bytes = 1 * 1024 * 1024 - max_value = b"1" * number_of_bytes +def test_mutation_batcher_mutate_w_max_row_bytes(): + table = 
_Table(TABLE_NAME) + mutation_batcher = _make_mutation_batcher( + table=table, max_row_bytes=3 * 1024 * 1024 + ) - row = DirectRow(row_key=b"row_key") - row.set_cell("cf1", b"c1", max_value) - row.set_cell("cf1", b"c2", max_value) - row.set_cell("cf1", b"c3", max_value) + number_of_bytes = 1 * 1024 * 1024 + max_value = b"1" * number_of_bytes - mutation_batcher.mutate(row) + row = DirectRow(row_key=b"row_key") + row.set_cell("cf1", b"c1", max_value) + row.set_cell("cf1", b"c2", max_value) + row.set_cell("cf1", b"c3", max_value) + + mutation_batcher.mutate(row) - self.assertEqual(table.mutation_calls, 1) + assert table.mutation_calls == 1 class _Instance(object): From 23587d7691bad559553171d7df701baebe299ad8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 14:43:43 -0400 Subject: [PATCH 04/16] tests: refactor 'client' unit tests w/ pytest idioms --- tests/unit/test_client.py | 1309 ++++++++++++++++++------------------- 1 file changed, 653 insertions(+), 656 deletions(-) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index f6cd7a5cc..00f8524bc 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -13,750 +13,747 @@ # limitations under the License. -import unittest - import mock +import pytest from ._testing import _make_credentials +PROJECT = "PROJECT" +INSTANCE_ID = "instance-id" +DISPLAY_NAME = "display-name" +USER_AGENT = "you-sir-age-int" + + +def _invoke_client_factory(client_class, **kw): + from google.cloud.bigtable.client import _create_gapic_client + + return _create_gapic_client(client_class, **kw) + + +def test___create_gapic_client_wo_emulator(): + client_class = mock.Mock() + credentials = _make_credentials() + client = _MockClient(credentials) + client_info = client._client_info = mock.Mock() + transport = mock.Mock() + + result = _invoke_client_factory(client_class, transport=transport)(client) + + assert result is client_class.return_value + client_class.assert_called_once_with( + credentials=None, + client_info=client_info, + client_options=None, + transport=transport, + ) + + +def test___create_gapic_client_wo_emulator_w_client_options(): + client_class = mock.Mock() + credentials = _make_credentials() + client = _MockClient(credentials) + client_info = client._client_info = mock.Mock() + client_options = mock.Mock() + transport = mock.Mock() + + result = _invoke_client_factory( + client_class, client_options=client_options, transport=transport + )(client) + + assert result is client_class.return_value + client_class.assert_called_once_with( + credentials=None, + client_info=client_info, + client_options=client_options, + transport=transport, + ) + + +def test___create_gapic_client_w_emulator(): + client_class = mock.Mock() + emulator_host = emulator_channel = object() + credentials = _make_credentials() + client_options = mock.Mock() + transport = mock.Mock() + + client = _MockClient( + credentials, emulator_host=emulator_host, emulator_channel=emulator_channel + ) + client_info = client._client_info = mock.Mock() + result = _invoke_client_factory( + client_class, client_options=client_options, transport=transport + )(client) + + assert result is client_class.return_value + client_class.assert_called_once_with( + credentials=None, + client_info=client_info, + client_options=client_options, + transport=transport, + ) + + +class _MockClient(object): + def __init__(self, credentials, emulator_host=None, emulator_channel=None): + self._credentials = credentials + self._emulator_host = emulator_host + 
self._emulator_channel = emulator_channel -class Test__create_gapic_client(unittest.TestCase): - def _invoke_client_factory(self, client_class, **kw): - from google.cloud.bigtable.client import _create_gapic_client - return _create_gapic_client(client_class, **kw) +def _make_client(*args, **kwargs): + from google.cloud.bigtable.client import Client - def test_wo_emulator(self): - client_class = mock.Mock() - credentials = _make_credentials() - client = _Client(credentials) - client_info = client._client_info = mock.Mock() - transport = mock.Mock() + return Client(*args, **kwargs) - result = self._invoke_client_factory(client_class, transport=transport)(client) - self.assertIs(result, client_class.return_value) - client_class.assert_called_once_with( - credentials=None, - client_info=client_info, - client_options=None, - transport=transport, - ) +@mock.patch("os.environ", {}) +def test_client_constructor_defaults(): + from google.api_core import client_info + from google.cloud.bigtable import __version__ + from google.cloud.bigtable.client import DATA_SCOPE - def test_wo_emulator_w_client_options(self): - client_class = mock.Mock() - credentials = _make_credentials() - client = _Client(credentials) - client_info = client._client_info = mock.Mock() - client_options = mock.Mock() - transport = mock.Mock() - - result = self._invoke_client_factory( - client_class, client_options=client_options, transport=transport - )(client) - - self.assertIs(result, client_class.return_value) - client_class.assert_called_once_with( - credentials=None, - client_info=client_info, - client_options=client_options, - transport=transport, - ) + credentials = _make_credentials() - def test_w_emulator(self): - client_class = mock.Mock() - emulator_host = emulator_channel = object() - credentials = _make_credentials() - client_options = mock.Mock() - transport = mock.Mock() + with mock.patch("google.auth.default") as mocked: + mocked.return_value = credentials, PROJECT + client = _make_client() - client = _Client( - credentials, emulator_host=emulator_host, emulator_channel=emulator_channel - ) - client_info = client._client_info = mock.Mock() - result = self._invoke_client_factory( - client_class, client_options=client_options, transport=transport - )(client) - - self.assertIs(result, client_class.return_value) - client_class.assert_called_once_with( - credentials=None, + assert client.project == PROJECT + assert client._credentials is credentials.with_scopes.return_value + assert not client._read_only + assert not client._admin + assert isinstance(client._client_info, client_info.ClientInfo) + assert client._client_info.client_library_version == __version__ + assert client._channel is None + assert client._emulator_host is None + assert client.SCOPE == (DATA_SCOPE,) + + +def test_client_constructor_explicit(): + import warnings + from google.cloud.bigtable.client import ADMIN_SCOPE + from google.cloud.bigtable.client import DATA_SCOPE + + credentials = _make_credentials() + client_info = mock.Mock() + + with warnings.catch_warnings(record=True) as warned: + client = _make_client( + project=PROJECT, + credentials=credentials, + read_only=False, + admin=True, client_info=client_info, - client_options=client_options, - transport=transport, - ) + channel=mock.sentinel.channel, + ) + + assert len(warned) == 1 + + assert client.project == PROJECT + assert client._credentials is credentials.with_scopes.return_value + assert not client._read_only + assert client._admin + assert client._client_info is client_info + assert 
client._channel is mock.sentinel.channel + assert client.SCOPE == (DATA_SCOPE, ADMIN_SCOPE) + +def test_client_constructor_w_both_admin_and_read_only(): + credentials = _make_credentials() + with pytest.raises(ValueError): + _make_client( + project=PROJECT, credentials=credentials, admin=True, read_only=True, + ) + + +def test_client_constructor_w_emulator_host(): + from google.cloud.environment_vars import BIGTABLE_EMULATOR + from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT + from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + + emulator_host = "localhost:8081" + with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): + with mock.patch("grpc.secure_channel") as factory: + client = _make_client() + # don't test local_composite_credentials + # client._local_composite_credentials = lambda: credentials + # channels are formed when needed, so access a client + # create a gapic channel + client.table_data_client + + assert client._emulator_host == emulator_host + assert client.project == _DEFAULT_BIGTABLE_EMULATOR_CLIENT + factory.assert_called_once_with( + emulator_host, + mock.ANY, # test of creds wrapping in '_emulator_host' below + options=_GRPC_CHANNEL_OPTIONS, + ) + + +def test_client_constructor_w_emulator_host_w_project(): + from google.cloud.environment_vars import BIGTABLE_EMULATOR + from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + + emulator_host = "localhost:8081" + with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): + with mock.patch("grpc.secure_channel") as factory: + client = _make_client(project=PROJECT) + # channels are formed when needed, so access a client + # create a gapic channel + client.table_data_client + + assert client._emulator_host == emulator_host + assert client.project == PROJECT + factory.assert_called_once_with( + emulator_host, + mock.ANY, # test of creds wrapping in '_emulator_host' below + options=_GRPC_CHANNEL_OPTIONS, + ) + + +def test_client_constructor_w_emulator_host_w_credentials(): + from google.cloud.environment_vars import BIGTABLE_EMULATOR + from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT + from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + + emulator_host = "localhost:8081" + credentials = _make_credentials() + with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): + with mock.patch("grpc.secure_channel") as factory: + client = _make_client(credentials=credentials) + # channels are formed when needed, so access a client + # create a gapic channel + client.table_data_client + + assert client._emulator_host == emulator_host + assert client.project == _DEFAULT_BIGTABLE_EMULATOR_CLIENT + factory.assert_called_once_with( + emulator_host, + mock.ANY, # test of creds wrapping in '_emulator_host' below + options=_GRPC_CHANNEL_OPTIONS, + ) + + +def test_client__get_scopes_default(): + from google.cloud.bigtable.client import DATA_SCOPE + + client = _make_client(project=PROJECT, credentials=_make_credentials()) + assert client._get_scopes() == (DATA_SCOPE,) + + +def test_client__get_scopes_w_admin(): + from google.cloud.bigtable.client import ADMIN_SCOPE + from google.cloud.bigtable.client import DATA_SCOPE + + client = _make_client(project=PROJECT, credentials=_make_credentials(), admin=True) + expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) + assert client._get_scopes() == expected_scopes + + +def test_client__get_scopes_w_read_only(): + from google.cloud.bigtable.client import READ_ONLY_SCOPE + + client = _make_client( + 
project=PROJECT, credentials=_make_credentials(), read_only=True + ) + assert client._get_scopes() == (READ_ONLY_SCOPE,) + + +def test_client__emulator_channel_w_sync(): + emulator_host = "localhost:8081" + transport_name = "GrpcTransportTesting" + transport = mock.Mock(spec=["__name__"], __name__=transport_name) + options = mock.Mock(spec=[]) + client = _make_client( + project=PROJECT, credentials=_make_credentials(), read_only=True + ) + client._emulator_host = emulator_host + lcc = client._local_composite_credentials = mock.Mock(spec=[]) + + with mock.patch("grpc.secure_channel") as patched: + channel = client._emulator_channel(transport, options) + + assert channel is patched.return_value + patched.assert_called_once_with( + emulator_host, lcc.return_value, options=options, + ) + + +def test_client__emulator_channel_w_async(): + emulator_host = "localhost:8081" + transport_name = "GrpcAsyncIOTransportTesting" + transport = mock.Mock(spec=["__name__"], __name__=transport_name) + options = mock.Mock(spec=[]) + client = _make_client( + project=PROJECT, credentials=_make_credentials(), read_only=True + ) + client._emulator_host = emulator_host + lcc = client._local_composite_credentials = mock.Mock(spec=[]) + + with mock.patch("grpc.aio.secure_channel") as patched: + channel = client._emulator_channel(transport, options) + + assert channel is patched.return_value + patched.assert_called_once_with( + emulator_host, lcc.return_value, options=options, + ) + + +def test_client__local_composite_credentials(): + client = _make_client( + project=PROJECT, credentials=_make_credentials(), read_only=True + ) + + wsir_patch = mock.patch("google.auth.credentials.with_scopes_if_required") + request_patch = mock.patch("google.auth.transport.requests.Request") + amp_patch = mock.patch("google.auth.transport.grpc.AuthMetadataPlugin") + grpc_patches = mock.patch.multiple( + "grpc", + metadata_call_credentials=mock.DEFAULT, + local_channel_credentials=mock.DEFAULT, + composite_channel_credentials=mock.DEFAULT, + ) + with wsir_patch as wsir_patched: + with request_patch as request_patched: + with amp_patch as amp_patched: + with grpc_patches as grpc_patched: + credentials = client._local_composite_credentials() + + grpc_mcc = grpc_patched["metadata_call_credentials"] + grpc_lcc = grpc_patched["local_channel_credentials"] + grpc_ccc = grpc_patched["composite_channel_credentials"] + + assert credentials is grpc_ccc.return_value + + wsir_patched.assert_called_once_with(client._credentials, None) + request_patched.assert_called_once_with() + amp_patched.assert_called_once_with( + wsir_patched.return_value, request_patched.return_value, + ) + grpc_mcc.assert_called_once_with(amp_patched.return_value) + grpc_lcc.assert_called_once_with() + grpc_ccc.assert_called_once_with(grpc_lcc.return_value, grpc_mcc.return_value) + + +def _create_gapic_client_channel_helper(endpoint=None, emulator_host=None): + from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + + client_class = mock.Mock(spec=["DEFAULT_ENDPOINT"]) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials) + + if endpoint is not None: + client._client_options = mock.Mock( + spec=["api_endpoint"], api_endpoint=endpoint, + ) + expected_host = endpoint + else: + expected_host = client_class.DEFAULT_ENDPOINT + + if emulator_host is not None: + client._emulator_host = emulator_host + client._emulator_channel = mock.Mock(spec=[]) + expected_host = emulator_host -class _Client(object): - def __init__(self, 
credentials, emulator_host=None, emulator_channel=None): - self._credentials = credentials - self._emulator_host = emulator_host - self._emulator_channel = emulator_channel + grpc_transport = mock.Mock(spec=["create_channel"]) + transport = client._create_gapic_client_channel(client_class, grpc_transport) -class TestClient(unittest.TestCase): - - PROJECT = "PROJECT" - INSTANCE_ID = "instance-id" - DISPLAY_NAME = "display-name" - USER_AGENT = "you-sir-age-int" - - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.client import Client - - return Client - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @mock.patch("os.environ", {}) - def test_constructor_defaults(self): - from google.api_core import client_info - from google.cloud.bigtable import __version__ - from google.cloud.bigtable.client import DATA_SCOPE - - credentials = _make_credentials() - - with mock.patch("google.auth.default") as mocked: - mocked.return_value = credentials, self.PROJECT - client = self._make_one() - - self.assertEqual(client.project, self.PROJECT) - self.assertIs(client._credentials, credentials.with_scopes.return_value) - self.assertFalse(client._read_only) - self.assertFalse(client._admin) - self.assertIsInstance(client._client_info, client_info.ClientInfo) - self.assertEqual(client._client_info.client_library_version, __version__) - self.assertIsNone(client._channel) - self.assertIsNone(client._emulator_host) - self.assertEqual(client.SCOPE, (DATA_SCOPE,)) - - def test_constructor_explicit(self): - import warnings - from google.cloud.bigtable.client import ADMIN_SCOPE - from google.cloud.bigtable.client import DATA_SCOPE - - credentials = _make_credentials() - client_info = mock.Mock() - - with warnings.catch_warnings(record=True) as warned: - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - read_only=False, - admin=True, - client_info=client_info, - channel=mock.sentinel.channel, - ) - - self.assertEqual(len(warned), 1) - - self.assertEqual(client.project, self.PROJECT) - self.assertIs(client._credentials, credentials.with_scopes.return_value) - self.assertFalse(client._read_only) - self.assertTrue(client._admin) - self.assertIs(client._client_info, client_info) - self.assertIs(client._channel, mock.sentinel.channel) - self.assertEqual(client.SCOPE, (DATA_SCOPE, ADMIN_SCOPE)) - - def test_constructor_both_admin_and_read_only(self): - credentials = _make_credentials() - with self.assertRaises(ValueError): - self._make_one( - project=self.PROJECT, - credentials=credentials, - admin=True, - read_only=True, - ) - - def test_constructor_with_emulator_host(self): - from google.cloud.environment_vars import BIGTABLE_EMULATOR - from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS - - emulator_host = "localhost:8081" - with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.secure_channel") as factory: - client = self._make_one() - # don't test local_composite_credentials - # client._local_composite_credentials = lambda: credentials - # channels are formed when needed, so access a client - # create a gapic channel - client.table_data_client - - self.assertEqual(client._emulator_host, emulator_host) - self.assertEqual(client.project, _DEFAULT_BIGTABLE_EMULATOR_CLIENT) - factory.assert_called_once_with( - emulator_host, - mock.ANY, # test of creds wrapping in '_emulator_host' below - 
options=_GRPC_CHANNEL_OPTIONS, - ) + assert transport is grpc_transport.return_value - def test_constructor_with_emulator_host_w_project(self): - from google.cloud.environment_vars import BIGTABLE_EMULATOR - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS - - emulator_host = "localhost:8081" - with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.secure_channel") as factory: - client = self._make_one(project=self.PROJECT) - # channels are formed when needed, so access a client - # create a gapic channel - client.table_data_client - - self.assertEqual(client._emulator_host, emulator_host) - self.assertEqual(client.project, self.PROJECT) - factory.assert_called_once_with( - emulator_host, - mock.ANY, # test of creds wrapping in '_emulator_host' below - options=_GRPC_CHANNEL_OPTIONS, + if emulator_host is not None: + client._emulator_channel.assert_called_once_with( + transport=grpc_transport, options=_GRPC_CHANNEL_OPTIONS, ) - - def test_constructor_with_emulator_host_w_credentials(self): - from google.cloud.environment_vars import BIGTABLE_EMULATOR - from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS - - emulator_host = "localhost:8081" - credentials = _make_credentials() - with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): - with mock.patch("grpc.secure_channel") as factory: - client = self._make_one(credentials=credentials) - # channels are formed when needed, so access a client - # create a gapic channel - client.table_data_client - - self.assertEqual(client._emulator_host, emulator_host) - self.assertEqual(client.project, _DEFAULT_BIGTABLE_EMULATOR_CLIENT) - factory.assert_called_once_with( - emulator_host, - mock.ANY, # test of creds wrapping in '_emulator_host' below + grpc_transport.assert_called_once_with( + channel=client._emulator_channel.return_value, host=expected_host, + ) + else: + grpc_transport.create_channel.assert_called_once_with( + host=expected_host, + credentials=client._credentials, options=_GRPC_CHANNEL_OPTIONS, ) + grpc_transport.assert_called_once_with( + channel=grpc_transport.create_channel.return_value, host=expected_host, + ) - def test__get_scopes_default(self): - from google.cloud.bigtable.client import DATA_SCOPE - client = self._make_one(project=self.PROJECT, credentials=_make_credentials()) - self.assertEqual(client._get_scopes(), (DATA_SCOPE,)) +def test_client__create_gapic_client_channel_w_defaults(): + _create_gapic_client_channel_helper() - def test__get_scopes_admin(self): - from google.cloud.bigtable.client import ADMIN_SCOPE - from google.cloud.bigtable.client import DATA_SCOPE - client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), admin=True - ) - expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) - self.assertEqual(client._get_scopes(), expected_scopes) +def test_client__create_gapic_client_channel_w_endpoint(): + endpoint = "api.example.com" + _create_gapic_client_channel_helper(endpoint=endpoint) - def test__get_scopes_read_only(self): - from google.cloud.bigtable.client import READ_ONLY_SCOPE - client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), read_only=True - ) - self.assertEqual(client._get_scopes(), (READ_ONLY_SCOPE,)) - - def test__emulator_channel_sync(self): - emulator_host = "localhost:8081" - transport_name = "GrpcTransportTesting" - transport = mock.Mock(spec=["__name__"], __name__=transport_name) - options = mock.Mock(spec=[]) 
- client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), read_only=True - ) - client._emulator_host = emulator_host - lcc = client._local_composite_credentials = mock.Mock(spec=[]) +def test_client__create_gapic_client_channel_w_emulator_host(): + host = "api.example.com:1234" + _create_gapic_client_channel_helper(emulator_host=host) - with mock.patch("grpc.secure_channel") as patched: - channel = client._emulator_channel(transport, options) - assert channel is patched.return_value - patched.assert_called_once_with( - emulator_host, lcc.return_value, options=options, - ) +def test_client__create_gapic_client_channel_w_endpoint_w_emulator_host(): + endpoint = "api.example.com" + host = "other.example.com:1234" + _create_gapic_client_channel_helper(endpoint=endpoint, emulator_host=host) - def test__emulator_channel_async(self): - emulator_host = "localhost:8081" - transport_name = "GrpcAsyncIOTransportTesting" - transport = mock.Mock(spec=["__name__"], __name__=transport_name) - options = mock.Mock(spec=[]) - client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), read_only=True - ) - client._emulator_host = emulator_host - lcc = client._local_composite_credentials = mock.Mock(spec=[]) - with mock.patch("grpc.aio.secure_channel") as patched: - channel = client._emulator_channel(transport, options) +def test_client_project_path(): + credentials = _make_credentials() + project = "PROJECT" + client = _make_client(project=project, credentials=credentials, admin=True) + project_name = "projects/" + project + assert client.project_path == project_name - assert channel is patched.return_value - patched.assert_called_once_with( - emulator_host, lcc.return_value, options=options, - ) - def test__local_composite_credentials(self): - client = self._make_one( - project=self.PROJECT, credentials=_make_credentials(), read_only=True - ) +def test_client_table_data_client_not_initialized(): + from google.cloud.bigtable_v2 import BigtableClient - wsir_patch = mock.patch("google.auth.credentials.with_scopes_if_required") - request_patch = mock.patch("google.auth.transport.requests.Request") - amp_patch = mock.patch("google.auth.transport.grpc.AuthMetadataPlugin") - grpc_patches = mock.patch.multiple( - "grpc", - metadata_call_credentials=mock.DEFAULT, - local_channel_credentials=mock.DEFAULT, - composite_channel_credentials=mock.DEFAULT, - ) - with wsir_patch as wsir_patched: - with request_patch as request_patched: - with amp_patch as amp_patched: - with grpc_patches as grpc_patched: - credentials = client._local_composite_credentials() - - grpc_mcc = grpc_patched["metadata_call_credentials"] - grpc_lcc = grpc_patched["local_channel_credentials"] - grpc_ccc = grpc_patched["composite_channel_credentials"] - - self.assertIs(credentials, grpc_ccc.return_value) - - wsir_patched.assert_called_once_with(client._credentials, None) - request_patched.assert_called_once_with() - amp_patched.assert_called_once_with( - wsir_patched.return_value, request_patched.return_value, - ) - grpc_mcc.assert_called_once_with(amp_patched.return_value) - grpc_lcc.assert_called_once_with() - grpc_ccc.assert_called_once_with(grpc_lcc.return_value, grpc_mcc.return_value) - - def _create_gapic_client_channel_helper( - self, endpoint=None, emulator_host=None, - ): - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS - - client_class = mock.Mock(spec=["DEFAULT_ENDPOINT"]) - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, 
credentials=credentials) - - if endpoint is not None: - client._client_options = mock.Mock( - spec=["api_endpoint"], api_endpoint=endpoint, - ) - expected_host = endpoint - else: - expected_host = client_class.DEFAULT_ENDPOINT - - if emulator_host is not None: - client._emulator_host = emulator_host - client._emulator_channel = mock.Mock(spec=[]) - expected_host = emulator_host - - grpc_transport = mock.Mock(spec=["create_channel"]) - - transport = client._create_gapic_client_channel(client_class, grpc_transport) - - self.assertIs(transport, grpc_transport.return_value) - - if emulator_host is not None: - client._emulator_channel.assert_called_once_with( - transport=grpc_transport, options=_GRPC_CHANNEL_OPTIONS, - ) - grpc_transport.assert_called_once_with( - channel=client._emulator_channel.return_value, host=expected_host, - ) - else: - grpc_transport.create_channel.assert_called_once_with( - host=expected_host, - credentials=client._credentials, - options=_GRPC_CHANNEL_OPTIONS, - ) - grpc_transport.assert_called_once_with( - channel=grpc_transport.create_channel.return_value, host=expected_host, - ) - - def test__create_gapic_client_channel_w_defaults(self): - self._create_gapic_client_channel_helper() - - def test__create_gapic_client_channel_w_endpoint(self): - endpoint = "api.example.com" - self._create_gapic_client_channel_helper(endpoint=endpoint) - - def test__create_gapic_client_channel_w_emulator_host(self): - host = "api.example.com:1234" - self._create_gapic_client_channel_helper(emulator_host=host) - - def test__create_gapic_client_channel_w_endpoint_w_emulator_host(self): - endpoint = "api.example.com" - host = "other.example.com:1234" - self._create_gapic_client_channel_helper(endpoint=endpoint, emulator_host=host) - - def test_project_path_property(self): - credentials = _make_credentials() - project = "PROJECT" - client = self._make_one(project=project, credentials=credentials, admin=True) - project_name = "projects/" + project - self.assertEqual(client.project_path, project_name) - - def test_table_data_client_not_initialized(self): - from google.cloud.bigtable_v2 import BigtableClient - - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=credentials) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials) - table_data_client = client.table_data_client - self.assertIsInstance(table_data_client, BigtableClient) - self.assertIs(client._table_data_client, table_data_client) + table_data_client = client.table_data_client + assert isinstance(table_data_client, BigtableClient) + assert client._table_data_client is table_data_client - def test_table_data_client_not_initialized_w_client_info(self): - from google.cloud.bigtable_v2 import BigtableClient - credentials = _make_credentials() - client_info = mock.Mock() - client = self._make_one( - project=self.PROJECT, credentials=credentials, client_info=client_info - ) +def test_client_table_data_client_not_initialized_w_client_info(): + from google.cloud.bigtable_v2 import BigtableClient - table_data_client = client.table_data_client - self.assertIsInstance(table_data_client, BigtableClient) - self.assertIs(client._client_info, client_info) - self.assertIs(client._table_data_client, table_data_client) + credentials = _make_credentials() + client_info = mock.Mock() + client = _make_client( + project=PROJECT, credentials=credentials, client_info=client_info + ) - def test_table_data_client_not_initialized_w_client_options(self): - from 
google.api_core.client_options import ClientOptions + table_data_client = client.table_data_client + assert isinstance(table_data_client, BigtableClient) + assert client._client_info is client_info + assert client._table_data_client is table_data_client - credentials = _make_credentials() - client_options = ClientOptions( - quota_project_id="QUOTA-PROJECT", api_endpoint="xyz" - ) - client = self._make_one( - project=self.PROJECT, credentials=credentials, client_options=client_options - ) - patch = mock.patch("google.cloud.bigtable_v2.BigtableClient") - with patch as mocked: - table_data_client = client.table_data_client +def test_client_table_data_client_not_initialized_w_client_options(): + from google.api_core.client_options import ClientOptions - self.assertIs(table_data_client, mocked.return_value) - self.assertIs(client._table_data_client, table_data_client) + credentials = _make_credentials() + client_options = ClientOptions(quota_project_id="QUOTA-PROJECT", api_endpoint="xyz") + client = _make_client( + project=PROJECT, credentials=credentials, client_options=client_options + ) - mocked.assert_called_once_with( - client_info=client._client_info, - credentials=None, - transport=mock.ANY, - client_options=client_options, - ) + patch = mock.patch("google.cloud.bigtable_v2.BigtableClient") + with patch as mocked: + table_data_client = client.table_data_client - def test_table_data_client_initialized(self): - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) + assert table_data_client is mocked.return_value + assert client._table_data_client is table_data_client - already = client._table_data_client = object() - self.assertIs(client.table_data_client, already) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=None, + transport=mock.ANY, + client_options=client_options, + ) - def test_table_admin_client_not_initialized_no_admin_flag(self): - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=credentials) - with self.assertRaises(ValueError): - client.table_admin_client() +def test_client_table_data_client_initialized(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) - def test_table_admin_client_not_initialized_w_admin_flag(self): - from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient + already = client._table_data_client = object() + assert client.table_data_client is already - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) - table_admin_client = client.table_admin_client - self.assertIsInstance(table_admin_client, BigtableTableAdminClient) - self.assertIs(client._table_admin_client, table_admin_client) +def test_client_table_admin_client_not_initialized_no_admin_flag(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials) - def test_table_admin_client_not_initialized_w_client_info(self): - from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient + with pytest.raises(ValueError): + client.table_admin_client() - credentials = _make_credentials() - client_info = mock.Mock() - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - admin=True, - client_info=client_info, - ) - - table_admin_client = client.table_admin_client - self.assertIsInstance(table_admin_client, BigtableTableAdminClient) - 
self.assertIs(client._client_info, client_info) - self.assertIs(client._table_admin_client, table_admin_client) - - def test_table_admin_client_not_initialized_w_client_options(self): - credentials = _make_credentials() - admin_client_options = mock.Mock() - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - admin=True, - admin_client_options=admin_client_options, - ) - client._create_gapic_client_channel = mock.Mock() - patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableTableAdminClient") - with patch as mocked: - table_admin_client = client.table_admin_client - - self.assertIs(table_admin_client, mocked.return_value) - self.assertIs(client._table_admin_client, table_admin_client) - mocked.assert_called_once_with( - client_info=client._client_info, - credentials=None, - transport=mock.ANY, - client_options=admin_client_options, - ) +def test_client_table_admin_client_not_initialized_w_admin_flag(): + from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient - def test_table_admin_client_initialized(self): - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) - already = client._table_admin_client = object() - self.assertIs(client.table_admin_client, already) + table_admin_client = client.table_admin_client + assert isinstance(table_admin_client, BigtableTableAdminClient) + assert client._table_admin_client is table_admin_client - def test_instance_admin_client_not_initialized_no_admin_flag(self): - credentials = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=credentials) - with self.assertRaises(ValueError): - client.instance_admin_client() +def test_client_table_admin_client_not_initialized_w_client_info(): + from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient - def test_instance_admin_client_not_initialized_w_admin_flag(self): - from google.cloud.bigtable_admin_v2 import BigtableInstanceAdminClient + credentials = _make_credentials() + client_info = mock.Mock() + client = _make_client( + project=PROJECT, credentials=credentials, admin=True, client_info=client_info, + ) - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) + table_admin_client = client.table_admin_client + assert isinstance(table_admin_client, BigtableTableAdminClient) + assert client._client_info is client_info + assert client._table_admin_client is table_admin_client - instance_admin_client = client.instance_admin_client - self.assertIsInstance(instance_admin_client, BigtableInstanceAdminClient) - self.assertIs(client._instance_admin_client, instance_admin_client) - def test_instance_admin_client_not_initialized_w_client_info(self): - from google.cloud.bigtable_admin_v2 import BigtableInstanceAdminClient +def test_client_table_admin_client_not_initialized_w_client_options(): + credentials = _make_credentials() + admin_client_options = mock.Mock() + client = _make_client( + project=PROJECT, + credentials=credentials, + admin=True, + admin_client_options=admin_client_options, + ) - credentials = _make_credentials() - client_info = mock.Mock() - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - admin=True, - client_info=client_info, - ) + client._create_gapic_client_channel = mock.Mock() + patch = 
mock.patch("google.cloud.bigtable_admin_v2.BigtableTableAdminClient") + with patch as mocked: + table_admin_client = client.table_admin_client - instance_admin_client = client.instance_admin_client - self.assertIsInstance(instance_admin_client, BigtableInstanceAdminClient) - self.assertIs(client._client_info, client_info) - self.assertIs(client._instance_admin_client, instance_admin_client) - - def test_instance_admin_client_not_initialized_w_client_options(self): - credentials = _make_credentials() - admin_client_options = mock.Mock() - client = self._make_one( - project=self.PROJECT, - credentials=credentials, - admin=True, - admin_client_options=admin_client_options, - ) + assert table_admin_client is mocked.return_value + assert client._table_admin_client is table_admin_client + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=None, + transport=mock.ANY, + client_options=admin_client_options, + ) - client._create_gapic_client_channel = mock.Mock() - patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableInstanceAdminClient") - with patch as mocked: - instance_admin_client = client.instance_admin_client - - self.assertIs(instance_admin_client, mocked.return_value) - self.assertIs(client._instance_admin_client, instance_admin_client) - mocked.assert_called_once_with( - client_info=client._client_info, - credentials=None, - transport=mock.ANY, - client_options=admin_client_options, - ) - def test_instance_admin_client_initialized(self): - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) +def test_client_table_admin_client_initialized(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) - already = client._instance_admin_client = object() - self.assertIs(client.instance_admin_client, already) - - def test_instance_factory_defaults(self): - from google.cloud.bigtable.instance import Instance - - PROJECT = "PROJECT" - INSTANCE_ID = "instance-id" - credentials = _make_credentials() - client = self._make_one(project=PROJECT, credentials=credentials) - - instance = client.instance(INSTANCE_ID) - - self.assertIsInstance(instance, Instance) - self.assertEqual(instance.instance_id, INSTANCE_ID) - self.assertEqual(instance.display_name, INSTANCE_ID) - self.assertIsNone(instance.type_) - self.assertIsNone(instance.labels) - self.assertIs(instance._client, client) - - def test_instance_factory_non_defaults(self): - from google.cloud.bigtable.instance import Instance - from google.cloud.bigtable import enums - - PROJECT = "PROJECT" - INSTANCE_ID = "instance-id" - DISPLAY_NAME = "display-name" - instance_type = enums.Instance.Type.DEVELOPMENT - labels = {"foo": "bar"} - credentials = _make_credentials() - client = self._make_one(project=PROJECT, credentials=credentials) - - instance = client.instance( - INSTANCE_ID, - display_name=DISPLAY_NAME, - instance_type=instance_type, - labels=labels, - ) + already = client._table_admin_client = object() + assert client.table_admin_client is already - self.assertIsInstance(instance, Instance) - self.assertEqual(instance.instance_id, INSTANCE_ID) - self.assertEqual(instance.display_name, DISPLAY_NAME) - self.assertEqual(instance.type_, instance_type) - self.assertEqual(instance.labels, labels) - self.assertIs(instance._client, client) - - def test_list_instances(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable_admin_v2.types 
import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable.instance import Instance - FAILED_LOCATION = "FAILED" - INSTANCE_ID1 = "instance-id1" - INSTANCE_ID2 = "instance-id2" - INSTANCE_NAME1 = "projects/" + self.PROJECT + "/instances/" + INSTANCE_ID1 - INSTANCE_NAME2 = "projects/" + self.PROJECT + "/instances/" + INSTANCE_ID2 +def test_client_instance_admin_client_not_initialized_no_admin_flag(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials) - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() + with pytest.raises(ValueError): + client.instance_admin_client() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) - # Create response_pb - response_pb = messages_v2_pb2.ListInstancesResponse( - failed_locations=[FAILED_LOCATION], - instances=[ - data_v2_pb2.Instance(name=INSTANCE_NAME1, display_name=INSTANCE_NAME1), - data_v2_pb2.Instance(name=INSTANCE_NAME2, display_name=INSTANCE_NAME2), - ], - ) +def test_client_instance_admin_client_not_initialized_w_admin_flag(): + from google.cloud.bigtable_admin_v2 import BigtableInstanceAdminClient - # Patch the stub used by the API method. - client._instance_admin_client = api - instance_stub = client._instance_admin_client + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) - instance_stub.list_instances.side_effect = [response_pb] + instance_admin_client = client.instance_admin_client + assert isinstance(instance_admin_client, BigtableInstanceAdminClient) + assert client._instance_admin_client is instance_admin_client - # Perform the method and check the result. 
- instances, failed_locations = client.list_instances() - instance_1, instance_2 = instances +def test_client_instance_admin_client_not_initialized_w_client_info(): + from google.cloud.bigtable_admin_v2 import BigtableInstanceAdminClient - self.assertIsInstance(instance_1, Instance) - self.assertEqual(instance_1.instance_id, INSTANCE_ID1) - self.assertTrue(instance_1._client is client) + credentials = _make_credentials() + client_info = mock.Mock() + client = _make_client( + project=PROJECT, credentials=credentials, admin=True, client_info=client_info, + ) - self.assertIsInstance(instance_2, Instance) - self.assertEqual(instance_2.instance_id, INSTANCE_ID2) - self.assertTrue(instance_2._client is client) + instance_admin_client = client.instance_admin_client + assert isinstance(instance_admin_client, BigtableInstanceAdminClient) + assert client._client_info is client_info + assert client._instance_admin_client is instance_admin_client - self.assertEqual(failed_locations, [FAILED_LOCATION]) - def test_list_clusters(self): - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.instance import Cluster +def test_client_instance_admin_client_not_initialized_w_client_options(): + credentials = _make_credentials() + admin_client_options = mock.Mock() + client = _make_client( + project=PROJECT, + credentials=credentials, + admin=True, + admin_client_options=admin_client_options, + ) - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - - credentials = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=credentials, admin=True - ) + client._create_gapic_client_channel = mock.Mock() + patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableInstanceAdminClient") + with patch as mocked: + instance_admin_client = client.instance_admin_client - INSTANCE_ID1 = "instance-id1" - INSTANCE_ID2 = "instance-id2" + assert instance_admin_client is mocked.return_value + assert client._instance_admin_client is instance_admin_client + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=None, + transport=mock.ANY, + client_options=admin_client_options, + ) - failed_location = "FAILED" - cluster_id1 = "{}-cluster".format(INSTANCE_ID1) - cluster_id2 = "{}-cluster-1".format(INSTANCE_ID2) - cluster_id3 = "{}-cluster-2".format(INSTANCE_ID2) - cluster_name1 = client.instance_admin_client.cluster_path( - self.PROJECT, INSTANCE_ID1, cluster_id1 - ) - cluster_name2 = client.instance_admin_client.cluster_path( - self.PROJECT, INSTANCE_ID2, cluster_id2 - ) - cluster_name3 = client.instance_admin_client.cluster_path( - self.PROJECT, INSTANCE_ID2, cluster_id3 - ) - # Create response_pb - response_pb = messages_v2_pb2.ListClustersResponse( - failed_locations=[failed_location], - clusters=[ - data_v2_pb2.Cluster(name=cluster_name1), - data_v2_pb2.Cluster(name=cluster_name2), - data_v2_pb2.Cluster(name=cluster_name3), - ], - ) +def test_client_instance_admin_client_initialized(): + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) - # Patch the stub used by the API method. 
- client._instance_admin_client = instance_api - instance_stub = client._instance_admin_client + already = client._instance_admin_client = object() + assert client.instance_admin_client is already - instance_stub.list_clusters.side_effect = [response_pb] - # Perform the method and check the result. - clusters, failed_locations = client.list_clusters() +def test_client_instance_factory_defaults(): + from google.cloud.bigtable.instance import Instance - cluster_1, cluster_2, cluster_3 = clusters + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials) - self.assertIsInstance(cluster_1, Cluster) - self.assertEqual(cluster_1.cluster_id, cluster_id1) - self.assertEqual(cluster_1._instance.instance_id, INSTANCE_ID1) + instance = client.instance(INSTANCE_ID) - self.assertIsInstance(cluster_2, Cluster) - self.assertEqual(cluster_2.cluster_id, cluster_id2) - self.assertEqual(cluster_2._instance.instance_id, INSTANCE_ID2) + assert isinstance(instance, Instance) + assert instance.instance_id == INSTANCE_ID + assert instance.display_name == INSTANCE_ID + assert instance.type_ is None + assert instance.labels is None + assert instance._client is client - self.assertIsInstance(cluster_3, Cluster) - self.assertEqual(cluster_3.cluster_id, cluster_id3) - self.assertEqual(cluster_3._instance.instance_id, INSTANCE_ID2) - self.assertEqual(failed_locations, [failed_location]) +def test_client_instance_factory_non_defaults(): + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable import enums + + instance_type = enums.Instance.Type.DEVELOPMENT + labels = {"foo": "bar"} + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials) + + instance = client.instance( + INSTANCE_ID, + display_name=DISPLAY_NAME, + instance_type=instance_type, + labels=labels, + ) + + assert isinstance(instance, Instance) + assert instance.instance_id == INSTANCE_ID + assert instance.display_name == DISPLAY_NAME + assert instance.type_ == instance_type + assert instance.labels == labels + assert instance._client is client + + +def test_client_list_instances(): + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.cloud.bigtable.instance import Instance + + FAILED_LOCATION = "FAILED" + INSTANCE_ID1 = "instance-id1" + INSTANCE_ID2 = "instance-id2" + INSTANCE_NAME1 = "projects/" + PROJECT + "/instances/" + INSTANCE_ID1 + INSTANCE_NAME2 = "projects/" + PROJECT + "/instances/" + INSTANCE_ID2 + + api = mock.create_autospec(BigtableInstanceAdminClient) + credentials = _make_credentials() + + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + + # Create response_pb + response_pb = messages_v2_pb2.ListInstancesResponse( + failed_locations=[FAILED_LOCATION], + instances=[ + data_v2_pb2.Instance(name=INSTANCE_NAME1, display_name=INSTANCE_NAME1), + data_v2_pb2.Instance(name=INSTANCE_NAME2, display_name=INSTANCE_NAME2), + ], + ) + + # Patch the stub used by the API method. + client._instance_admin_client = api + instance_stub = client._instance_admin_client + + instance_stub.list_instances.side_effect = [response_pb] + + # Perform the method and check the result. 
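+    # list_instances() returns a pair: (instances, failed_locations).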
+ instances, failed_locations = client.list_instances() + + instance_1, instance_2 = instances + + assert isinstance(instance_1, Instance) + assert instance_1.instance_id == INSTANCE_ID1 + assert instance_1._client is client + + assert isinstance(instance_2, Instance) + assert instance_2.instance_id == INSTANCE_ID2 + assert instance_2._client is client + + assert failed_locations == [FAILED_LOCATION] + + +def test_client_list_clusters(): + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminClient, + ) + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 + from google.cloud.bigtable.instance import Cluster + + instance_api = mock.create_autospec(BigtableInstanceAdminClient) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + + INSTANCE_ID1 = "instance-id1" + INSTANCE_ID2 = "instance-id2" + + failed_location = "FAILED" + cluster_id1 = "{}-cluster".format(INSTANCE_ID1) + cluster_id2 = "{}-cluster-1".format(INSTANCE_ID2) + cluster_id3 = "{}-cluster-2".format(INSTANCE_ID2) + cluster_name1 = client.instance_admin_client.cluster_path( + PROJECT, INSTANCE_ID1, cluster_id1 + ) + cluster_name2 = client.instance_admin_client.cluster_path( + PROJECT, INSTANCE_ID2, cluster_id2 + ) + cluster_name3 = client.instance_admin_client.cluster_path( + PROJECT, INSTANCE_ID2, cluster_id3 + ) + + # Create response_pb + response_pb = messages_v2_pb2.ListClustersResponse( + failed_locations=[failed_location], + clusters=[ + data_v2_pb2.Cluster(name=cluster_name1), + data_v2_pb2.Cluster(name=cluster_name2), + data_v2_pb2.Cluster(name=cluster_name3), + ], + ) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + instance_stub = client._instance_admin_client + + instance_stub.list_clusters.side_effect = [response_pb] + + # Perform the method and check the result. + clusters, failed_locations = client.list_clusters() + + cluster_1, cluster_2, cluster_3 = clusters + + assert isinstance(cluster_1, Cluster) + assert cluster_1.cluster_id == cluster_id1 + assert cluster_1._instance.instance_id == INSTANCE_ID1 + + assert isinstance(cluster_2, Cluster) + assert cluster_2.cluster_id == cluster_id2 + assert cluster_2._instance.instance_id == INSTANCE_ID2 + + assert isinstance(cluster_3, Cluster) + assert cluster_3.cluster_id == cluster_id3 + assert cluster_3._instance.instance_id == INSTANCE_ID2 + + assert failed_locations == [failed_location] From 4d67d2aa9cc84fb21b49e502394c417843a5b493 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 15:38:55 -0400 Subject: [PATCH 05/16] tests: refactor 'cluster' unit tests w/ pytest idioms --- tests/unit/test_cluster.py | 1044 +++++++++++++++++------------------- 1 file changed, 507 insertions(+), 537 deletions(-) diff --git a/tests/unit/test_cluster.py b/tests/unit/test_cluster.py index 1194e53c9..74ca98830 100644 --- a/tests/unit/test_cluster.py +++ b/tests/unit/test_cluster.py @@ -13,552 +13,522 @@ # limitations under the License. 
-import unittest
-
 import mock
 import pytest
 
 from ._testing import _make_credentials
 
+PROJECT = "project"
+INSTANCE_ID = "instance-id"
+LOCATION_ID = "location-id"
+CLUSTER_ID = "cluster-id"
+CLUSTER_NAME = (
+    "projects/" + PROJECT + "/instances/" + INSTANCE_ID + "/clusters/" + CLUSTER_ID
+)
+LOCATION_PATH = "projects/" + PROJECT + "/locations/"
+SERVE_NODES = 5
+OP_ID = 5678
+OP_NAME = "operations/projects/{}/instances/{}/clusters/{}/operations/{}".format(
+    PROJECT, INSTANCE_ID, CLUSTER_ID, OP_ID
+)
+KEY_RING_ID = "key-ring-id"
+CRYPTO_KEY_ID = "crypto-key-id"
+KMS_KEY_NAME = f"{LOCATION_PATH}/keyRings/{KEY_RING_ID}/cryptoKeys/{CRYPTO_KEY_ID}"
+
+
+def _make_cluster(*args, **kwargs):
+    from google.cloud.bigtable.cluster import Cluster
+
+    return Cluster(*args, **kwargs)
+
+
+def _make_client(*args, **kwargs):
+    from google.cloud.bigtable.client import Client
+
+    return Client(*args, **kwargs)
+
+
+def test_cluster_constructor_defaults():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+
+    cluster = _make_cluster(CLUSTER_ID, instance)
+
+    assert cluster.cluster_id == CLUSTER_ID
+    assert cluster._instance is instance
+    assert cluster.location_id is None
+    assert cluster.state is None
+    assert cluster.serve_nodes is None
+    assert cluster.default_storage_type is None
+    assert cluster.kms_key_name is None
+
+
+def test_cluster_constructor_explicit():
+    from google.cloud.bigtable.enums import StorageType
+    from google.cloud.bigtable.enums import Cluster
+
+    STATE = Cluster.State.READY
+    STORAGE_TYPE_SSD = StorageType.SSD
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+
+    cluster = _make_cluster(
+        CLUSTER_ID,
+        instance,
+        location_id=LOCATION_ID,
+        _state=STATE,
+        serve_nodes=SERVE_NODES,
+        default_storage_type=STORAGE_TYPE_SSD,
+        kms_key_name=KMS_KEY_NAME,
+    )
+    assert cluster.cluster_id == CLUSTER_ID
+    assert cluster._instance is instance
+    assert cluster.location_id == LOCATION_ID
+    assert cluster.state == STATE
+    assert cluster.serve_nodes == SERVE_NODES
+    assert cluster.default_storage_type == STORAGE_TYPE_SSD
+    assert cluster.kms_key_name == KMS_KEY_NAME
+
+
+def test_cluster_name():
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster = _make_cluster(CLUSTER_ID, instance)
+
+    assert cluster.name == CLUSTER_NAME
+
+
+def test_cluster_kms_key_name():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster = _make_cluster(CLUSTER_ID, instance, kms_key_name=KMS_KEY_NAME)
+
+    assert cluster.kms_key_name == KMS_KEY_NAME
+
+
+def test_cluster_kms_key_name_setter():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster = _make_cluster(CLUSTER_ID, instance, kms_key_name=KMS_KEY_NAME)
+
+    with pytest.raises(AttributeError):
+        cluster.kms_key_name = "I'm read only"
+
+
+def test_cluster_from_pb_success():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.cluster import Cluster
+    from google.cloud.bigtable import enums
+
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+
+    location = LOCATION_PATH + LOCATION_ID
+    state = enums.Cluster.State.RESIZING
+    storage_type = enums.StorageType.SSD
+    cluster_pb = data_v2_pb2.Cluster(
+        name=CLUSTER_NAME,
+        location=location,
+        state=state,
+        serve_nodes=SERVE_NODES,
+        default_storage_type=storage_type,
+        
encryption_config=data_v2_pb2.Cluster.EncryptionConfig(
+            kms_key_name=KMS_KEY_NAME,
+        ),
+    )
+
+    cluster = Cluster.from_pb(cluster_pb, instance)
+    assert isinstance(cluster, Cluster)
+    assert cluster._instance == instance
+    assert cluster.cluster_id == CLUSTER_ID
+    assert cluster.location_id == LOCATION_ID
+    assert cluster.state == state
+    assert cluster.serve_nodes == SERVE_NODES
+    assert cluster.default_storage_type == storage_type
+    assert cluster.kms_key_name == KMS_KEY_NAME
+
+
+def test_cluster_from_pb_w_bad_cluster_name():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.cluster import Cluster
+
+    bad_cluster_name = "BAD_NAME"
+
+    cluster_pb = data_v2_pb2.Cluster(name=bad_cluster_name)
+
+    with pytest.raises(ValueError):
+        Cluster.from_pb(cluster_pb, None)
+
+
+def test_cluster_from_pb_w_instance_id_mismatch():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.cluster import Cluster
+
+    ALT_INSTANCE_ID = "ALT_INSTANCE_ID"
+    client = _Client(PROJECT)
+    instance = _Instance(ALT_INSTANCE_ID, client)
+
+    assert INSTANCE_ID != ALT_INSTANCE_ID
+    cluster_pb = data_v2_pb2.Cluster(name=CLUSTER_NAME)
 
-class TestCluster(unittest.TestCase):
+    with pytest.raises(ValueError):
+        Cluster.from_pb(cluster_pb, instance)
 
-    PROJECT = "project"
-    INSTANCE_ID = "instance-id"
-    LOCATION_ID = "location-id"
-    CLUSTER_ID = "cluster-id"
-    LOCATION_ID = "location-id"
-    CLUSTER_NAME = (
-        "projects/" + PROJECT + "/instances/" + INSTANCE_ID + "/clusters/" + CLUSTER_ID
+
+def test_cluster_from_pb_w_project_mismatch():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.cluster import Cluster
+
+    ALT_PROJECT = "ALT_PROJECT"
+    client = _Client(project=ALT_PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+
+    assert PROJECT != ALT_PROJECT
+    cluster_pb = data_v2_pb2.Cluster(name=CLUSTER_NAME)
+
+    with pytest.raises(ValueError):
+        Cluster.from_pb(cluster_pb, instance)
+
+
+def test_cluster___eq__():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster1 = _make_cluster(CLUSTER_ID, instance, LOCATION_ID)
+    cluster2 = _make_cluster(CLUSTER_ID, instance, LOCATION_ID)
+    assert cluster1 == cluster2
+
+
+def test_cluster___eq___w_type_differ():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster1 = _make_cluster(CLUSTER_ID, instance, LOCATION_ID)
+    cluster2 = object()
+    assert cluster1 != cluster2
+
+
+def test_cluster___ne___w_same_value():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster1 = _make_cluster(CLUSTER_ID, instance, LOCATION_ID)
+    cluster2 = _make_cluster(CLUSTER_ID, instance, LOCATION_ID)
+    assert not (cluster1 != cluster2)
+
+
+def test_cluster___ne__():
+    client = _Client(PROJECT)
+    instance = _Instance(INSTANCE_ID, client)
+    cluster1 = _make_cluster("cluster_id1", instance, LOCATION_ID)
+    cluster2 = _make_cluster("cluster_id2", instance, LOCATION_ID)
+    assert cluster1 != cluster2
+
+
+def _make_instance_admin_client():
+    # autospec keeps the stub's surface in sync with the real admin client
+    from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
+        BigtableInstanceAdminClient,
+    )
+
+    return mock.create_autospec(BigtableInstanceAdminClient)
+
+
+def test_cluster_reload():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.enums import StorageType
+    from google.cloud.bigtable.enums import Cluster
+
+    credentials = _make_credentials()
+    client = 
_make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + instance = _Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + serve_nodes=SERVE_NODES, + default_storage_type=STORAGE_TYPE_SSD, + kms_key_name=KMS_KEY_NAME, ) - LOCATION_PATH = "projects/" + PROJECT + "/locations/" - SERVE_NODES = 5 - OP_ID = 5678 - OP_NAME = "operations/projects/{}/instances/{}/clusters/{}/operations/{}".format( - PROJECT, INSTANCE_ID, CLUSTER_ID, OP_ID + + # Create response_pb + LOCATION_ID_FROM_SERVER = "new-location-id" + STATE = Cluster.State.READY + SERVE_NODES_FROM_SERVER = 10 + STORAGE_TYPE_FROM_SERVER = StorageType.HDD + + response_pb = data_v2_pb2.Cluster( + name=cluster.name, + location=LOCATION_PATH + LOCATION_ID_FROM_SERVER, + state=STATE, + serve_nodes=SERVE_NODES_FROM_SERVER, + default_storage_type=STORAGE_TYPE_FROM_SERVER, ) - KEY_RING_ID = "key-ring-id" - CRYPTO_KEY_ID = "crypto-key-id" - KMS_KEY_NAME = f"{LOCATION_PATH}/keyRings/{KEY_RING_ID}/cryptoKeys/{CRYPTO_KEY_ID}" - - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.cluster import Cluster - - return Cluster - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client - - return Client - - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - - def test_constructor_defaults(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - cluster = self._make_one(self.CLUSTER_ID, instance) - self.assertEqual(cluster.cluster_id, self.CLUSTER_ID) - self.assertIs(cluster._instance, instance) - self.assertIsNone(cluster.location_id) - self.assertIsNone(cluster.state) - self.assertIsNone(cluster.serve_nodes) - self.assertIsNone(cluster.default_storage_type) - self.assertIsNone(cluster.kms_key_name) - - def test_constructor_non_default(self): - from google.cloud.bigtable.enums import StorageType - from google.cloud.bigtable.enums import Cluster - - STATE = Cluster.State.READY - STORAGE_TYPE_SSD = StorageType.SSD - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - cluster = self._make_one( - self.CLUSTER_ID, - instance, - location_id=self.LOCATION_ID, - _state=STATE, - serve_nodes=self.SERVE_NODES, - default_storage_type=STORAGE_TYPE_SSD, - kms_key_name=self.KMS_KEY_NAME, - ) - self.assertEqual(cluster.cluster_id, self.CLUSTER_ID) - self.assertIs(cluster._instance, instance) - self.assertEqual(cluster.location_id, self.LOCATION_ID) - self.assertEqual(cluster.state, STATE) - self.assertEqual(cluster.serve_nodes, self.SERVE_NODES) - self.assertEqual(cluster.default_storage_type, STORAGE_TYPE_SSD) - self.assertEqual(cluster.kms_key_name, self.KMS_KEY_NAME) - - def test_name_property(self): - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = _Instance(self.INSTANCE_ID, client) - cluster = self._make_one(self.CLUSTER_ID, instance) - - self.assertEqual(cluster.name, self.CLUSTER_NAME) - - def test_kms_key_name_property(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - cluster = self._make_one( - self.CLUSTER_ID, instance, kms_key_name=self.KMS_KEY_NAME - ) - - self.assertEqual(cluster.kms_key_name, self.KMS_KEY_NAME) - with pytest.raises(AttributeError): - 
cluster.kms_key_name = "I'm read only" - - def test_from_pb_success(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable import enums - - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - location = self.LOCATION_PATH + self.LOCATION_ID - state = enums.Cluster.State.RESIZING - storage_type = enums.StorageType.SSD - cluster_pb = data_v2_pb2.Cluster( - name=self.CLUSTER_NAME, - location=location, - state=state, - serve_nodes=self.SERVE_NODES, - default_storage_type=storage_type, - encryption_config=data_v2_pb2.Cluster.EncryptionConfig( - kms_key_name=self.KMS_KEY_NAME, - ), - ) - - klass = self._get_target_class() - cluster = klass.from_pb(cluster_pb, instance) - self.assertIsInstance(cluster, klass) - self.assertEqual(cluster._instance, instance) - self.assertEqual(cluster.cluster_id, self.CLUSTER_ID) - self.assertEqual(cluster.location_id, self.LOCATION_ID) - self.assertEqual(cluster.state, state) - self.assertEqual(cluster.serve_nodes, self.SERVE_NODES) - self.assertEqual(cluster.default_storage_type, storage_type) - self.assertEqual(cluster.kms_key_name, self.KMS_KEY_NAME) - - def test_from_pb_bad_cluster_name(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - - bad_cluster_name = "BAD_NAME" - - cluster_pb = data_v2_pb2.Cluster(name=bad_cluster_name) - - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_pb(cluster_pb, None) - - def test_from_pb_instance_id_mistmatch(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - - ALT_INSTANCE_ID = "ALT_INSTANCE_ID" - client = _Client(self.PROJECT) - instance = _Instance(ALT_INSTANCE_ID, client) - - self.assertNotEqual(self.INSTANCE_ID, ALT_INSTANCE_ID) - cluster_pb = data_v2_pb2.Cluster(name=self.CLUSTER_NAME) - - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_pb(cluster_pb, instance) - - def test_from_pb_project_mistmatch(self): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - - ALT_PROJECT = "ALT_PROJECT" - client = _Client(project=ALT_PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - - self.assertNotEqual(self.PROJECT, ALT_PROJECT) - cluster_pb = data_v2_pb2.Cluster(name=self.CLUSTER_NAME) - - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_pb(cluster_pb, instance) - - def test___eq__(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - cluster1 = self._make_one(self.CLUSTER_ID, instance, self.LOCATION_ID) - cluster2 = self._make_one(self.CLUSTER_ID, instance, self.LOCATION_ID) - self.assertEqual(cluster1, cluster2) - - def test___eq__type_differ(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - cluster1 = self._make_one(self.CLUSTER_ID, instance, self.LOCATION_ID) - cluster2 = object() - self.assertNotEqual(cluster1, cluster2) - - def test___ne__same_value(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - cluster1 = self._make_one(self.CLUSTER_ID, instance, self.LOCATION_ID) - cluster2 = self._make_one(self.CLUSTER_ID, instance, self.LOCATION_ID) - comparison_val = cluster1 != cluster2 - self.assertFalse(comparison_val) - - def test___ne__(self): - client = _Client(self.PROJECT) - instance = _Instance(self.INSTANCE_ID, client) - cluster1 = self._make_one("cluster_id1", instance, self.LOCATION_ID) - cluster2 = 
self._make_one("cluster_id2", instance, self.LOCATION_ID) - self.assertNotEqual(cluster1, cluster2) - - def test_reload(self): - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.enums import StorageType - from google.cloud.bigtable.enums import Cluster - - api = mock.create_autospec(BigtableInstanceAdminClient) - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - STORAGE_TYPE_SSD = StorageType.SSD - instance = _Instance(self.INSTANCE_ID, client) - cluster = self._make_one( - self.CLUSTER_ID, - instance, - location_id=self.LOCATION_ID, - serve_nodes=self.SERVE_NODES, - default_storage_type=STORAGE_TYPE_SSD, - kms_key_name=self.KMS_KEY_NAME, - ) - - # Create response_pb - LOCATION_ID_FROM_SERVER = "new-location-id" - STATE = Cluster.State.READY - SERVE_NODES_FROM_SERVER = 10 - STORAGE_TYPE_FROM_SERVER = StorageType.HDD - - response_pb = data_v2_pb2.Cluster( - name=cluster.name, - location=self.LOCATION_PATH + LOCATION_ID_FROM_SERVER, - state=STATE, - serve_nodes=SERVE_NODES_FROM_SERVER, - default_storage_type=STORAGE_TYPE_FROM_SERVER, - ) - - # Patch the stub used by the API method. - client._instance_admin_client = api - instance_stub = client._instance_admin_client - - instance_stub.get_cluster.side_effect = [response_pb] - - # Create expected_result. - expected_result = None # reload() has no return value. - - # Check Cluster optional config values before. - self.assertEqual(cluster.location_id, self.LOCATION_ID) - self.assertIsNone(cluster.state) - self.assertEqual(cluster.serve_nodes, self.SERVE_NODES) - self.assertEqual(cluster.default_storage_type, STORAGE_TYPE_SSD) - - # Perform the method and check the result. - result = cluster.reload() - self.assertEqual(result, expected_result) - self.assertEqual(cluster.location_id, LOCATION_ID_FROM_SERVER) - self.assertEqual(cluster.state, STATE) - self.assertEqual(cluster.serve_nodes, SERVE_NODES_FROM_SERVER) - self.assertEqual(cluster.default_storage_type, STORAGE_TYPE_FROM_SERVER) - self.assertEqual(cluster.kms_key_name, None) - - def test_exists(self): - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.instance import Instance - from google.api_core import exceptions - - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = Instance(self.INSTANCE_ID, client) - - # Create response_pb - cluster_name = client.instance_admin_client.cluster_path( - self.PROJECT, self.INSTANCE_ID, self.CLUSTER_ID - ) - response_pb = data_v2_pb2.Cluster(name=cluster_name) - - # Patch the stub used by the API method. - client._instance_admin_client = instance_api - bigtable_instance_stub = client._instance_admin_client - - bigtable_instance_stub.get_cluster.side_effect = [ - response_pb, - exceptions.NotFound("testing"), - exceptions.BadRequest("testing"), - ] - - # Perform the method and check the result. 
- non_existing_cluster_id = "cluster-id-2" - alt_cluster_1 = self._make_one(self.CLUSTER_ID, instance) - alt_cluster_2 = self._make_one(non_existing_cluster_id, instance) - self.assertTrue(alt_cluster_1.exists()) - self.assertFalse(alt_cluster_2.exists()) - with self.assertRaises(exceptions.BadRequest): - alt_cluster_1.exists() - - def test_create(self): - import datetime - from google.longrunning import operations_pb2 - from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable.instance import Instance - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 - from google.cloud.bigtable.enums import StorageType - - NOW = datetime.datetime.utcnow() - NOW_PB = _datetime_to_pb_timestamp(NOW) - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - STORAGE_TYPE_SSD = StorageType.SSD - LOCATION = self.LOCATION_PATH + self.LOCATION_ID - instance = Instance(self.INSTANCE_ID, client) - cluster = self._make_one( - self.CLUSTER_ID, - instance, - location_id=self.LOCATION_ID, - serve_nodes=self.SERVE_NODES, - default_storage_type=STORAGE_TYPE_SSD, - ) - expected_request_cluster = instance_v2_pb2.Cluster( - location=LOCATION, - serve_nodes=cluster.serve_nodes, - default_storage_type=cluster.default_storage_type, - ) - expected_request = { - "request": { - "parent": instance.name, - "cluster_id": self.CLUSTER_ID, - "cluster": expected_request_cluster, - } - } - name = instance.name - metadata = messages_v2_pb2.CreateClusterMetadata(request_time=NOW_PB) - type_url = "type.googleapis.com/{}".format( - messages_v2_pb2.CreateClusterMetadata._meta._pb.DESCRIPTOR.full_name - ) - response_pb = operations_pb2.Operation( - name=self.OP_NAME, - metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), - ) - - # Patch the stub used by the API method. - api = mock.create_autospec(BigtableInstanceAdminClient) - api.common_location_path.return_value = LOCATION - client._instance_admin_client = api - cluster._instance._client = client - cluster._instance._client.instance_admin_client.instance_path.return_value = ( - name - ) - client._instance_admin_client.create_cluster.return_value = response_pb - # Perform the method and check the result. 
- cluster.create() - - actual_request = client._instance_admin_client.create_cluster.call_args_list[ - 0 - ].kwargs - self.assertEqual(actual_request, expected_request) - - def test_create_w_cmek(self): - import datetime - from google.longrunning import operations_pb2 - from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable.instance import Instance - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 - from google.cloud.bigtable.enums import StorageType - - NOW = datetime.datetime.utcnow() - NOW_PB = _datetime_to_pb_timestamp(NOW) - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - STORAGE_TYPE_SSD = StorageType.SSD - LOCATION = self.LOCATION_PATH + self.LOCATION_ID - instance = Instance(self.INSTANCE_ID, client) - cluster = self._make_one( - self.CLUSTER_ID, - instance, - location_id=self.LOCATION_ID, - serve_nodes=self.SERVE_NODES, - default_storage_type=STORAGE_TYPE_SSD, - kms_key_name=self.KMS_KEY_NAME, - ) - expected_request_cluster = instance_v2_pb2.Cluster( - location=LOCATION, - serve_nodes=cluster.serve_nodes, - default_storage_type=cluster.default_storage_type, - encryption_config=instance_v2_pb2.Cluster.EncryptionConfig( - kms_key_name=self.KMS_KEY_NAME, - ), - ) - expected_request = { - "request": { - "parent": instance.name, - "cluster_id": self.CLUSTER_ID, - "cluster": expected_request_cluster, - } - } - name = instance.name - metadata = messages_v2_pb2.CreateClusterMetadata(request_time=NOW_PB) - type_url = "type.googleapis.com/{}".format( - messages_v2_pb2.CreateClusterMetadata._meta._pb.DESCRIPTOR.full_name - ) - response_pb = operations_pb2.Operation( - name=self.OP_NAME, - metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), - ) - - # Patch the stub used by the API method. - api = mock.create_autospec(BigtableInstanceAdminClient) - api.common_location_path.return_value = LOCATION - client._instance_admin_client = api - cluster._instance._client = client - cluster._instance._client.instance_admin_client.instance_path.return_value = ( - name - ) - client._instance_admin_client.create_cluster.return_value = response_pb - # Perform the method and check the result. 
- cluster.create() - - actual_request = client._instance_admin_client.create_cluster.call_args_list[ - 0 - ].kwargs - self.assertEqual(actual_request, expected_request) - - def test_update(self): - import datetime - from google.longrunning import operations_pb2 - from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable.enums import StorageType - - NOW = datetime.datetime.utcnow() - NOW_PB = _datetime_to_pb_timestamp(NOW) - - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - STORAGE_TYPE_SSD = StorageType.SSD - instance = _Instance(self.INSTANCE_ID, client) - cluster = self._make_one( - self.CLUSTER_ID, - instance, - location_id=self.LOCATION_ID, - serve_nodes=self.SERVE_NODES, - default_storage_type=STORAGE_TYPE_SSD, - ) - # Create expected_request - expected_request = { - "request": { - "name": "projects/project/instances/instance-id/clusters/cluster-id", - "serve_nodes": 5, - "location": None, - } - } - metadata = messages_v2_pb2.UpdateClusterMetadata(request_time=NOW_PB) - type_url = "type.googleapis.com/{}".format( - messages_v2_pb2.UpdateClusterMetadata._meta._pb.DESCRIPTOR.full_name - ) - response_pb = operations_pb2.Operation( - name=self.OP_NAME, - metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), - ) - - # Patch the stub used by the API method. - api = mock.create_autospec(BigtableInstanceAdminClient) - client._instance_admin_client = api - cluster._instance._client.instance_admin_client.cluster_path.return_value = ( - "projects/project/instances/instance-id/clusters/cluster-id" - ) - # Perform the method and check the result. - client._instance_admin_client.update_cluster.return_value = response_pb - cluster.update() - - actual_request = client._instance_admin_client.update_cluster.call_args_list[ - 0 - ].kwargs - - self.assertEqual(actual_request, expected_request) - - def test_delete(self): - from google.protobuf import empty_pb2 - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - - api = mock.create_autospec(BigtableInstanceAdminClient) - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT, credentials=credentials, admin=True - ) - instance = _Instance(self.INSTANCE_ID, client) - cluster = self._make_one(self.CLUSTER_ID, instance, self.LOCATION_ID) - - # Create response_pb - response_pb = empty_pb2.Empty() - - # Patch the stub used by the API method. - client._instance_admin_client = api - instance_admin_client = client._instance_admin_client - instance_stub = instance_admin_client - instance_stub.delete_cluster.side_effect = [response_pb] - - # Create expected_result. - expected_result = None # delete() has no return value. - - # Perform the method and check the result. - result = cluster.delete() - - self.assertEqual(result, expected_result) + + # Patch the stub used by the API method. + api = client._instance_admin_client = _make_instance_admin_client() + api.get_cluster.side_effect = [response_pb] + + # Create expected_result. + expected_result = None # reload() has no return value. + + # Check Cluster optional config values before. 
+ assert cluster.location_id == LOCATION_ID + assert cluster.state is None + assert cluster.serve_nodes == SERVE_NODES + assert cluster.default_storage_type == STORAGE_TYPE_SSD + + # Perform the method and check the result. + result = cluster.reload() + assert result == expected_result + assert cluster.location_id == LOCATION_ID_FROM_SERVER + assert cluster.state == STATE + assert cluster.serve_nodes == SERVE_NODES_FROM_SERVER + assert cluster.default_storage_type == STORAGE_TYPE_FROM_SERVER + assert cluster.kms_key_name is None + + api.get_cluster.assert_called_once_with(request={"name": cluster.name}) + + +def test_cluster_exists_hit(): + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 + from google.cloud.bigtable.instance import Instance + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = Instance(INSTANCE_ID, client) + + cluster_name = client.instance_admin_client.cluster_path( + PROJECT, INSTANCE_ID, CLUSTER_ID + ) + response_pb = data_v2_pb2.Cluster(name=cluster_name) + + api = client._instance_admin_client = _make_instance_admin_client() + api.get_cluster.return_value = response_pb + + cluster = _make_cluster(CLUSTER_ID, instance) + + assert cluster.exists() + + api.get_cluster.assert_called_once_with(request={"name": cluster.name}) + + +def test_cluster_exists_miss(): + from google.cloud.bigtable.instance import Instance + from google.api_core import exceptions + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = Instance(INSTANCE_ID, client) + + api = client._instance_admin_client = _make_instance_admin_client() + api.get_cluster.side_effect = exceptions.NotFound("testing") + + non_existing_cluster_id = "nonesuch-cluster-2" + cluster = _make_cluster(non_existing_cluster_id, instance) + + assert not cluster.exists() + + api.get_cluster.assert_called_once_with(request={"name": cluster.name}) + + +def test_cluster_exists_w_error(): + from google.cloud.bigtable.instance import Instance + from google.api_core import exceptions + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = Instance(INSTANCE_ID, client) + + api = client._instance_admin_client = _make_instance_admin_client() + api.get_cluster.side_effect = exceptions.BadRequest("testing") + + cluster = _make_cluster(CLUSTER_ID, instance) + + with pytest.raises(exceptions.BadRequest): + cluster.exists() + + api.get_cluster.assert_called_once_with(request={"name": cluster.name}) + + +def test_cluster_create(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + serve_nodes=SERVE_NODES, + 
default_storage_type=STORAGE_TYPE_SSD, + ) + metadata = messages_v2_pb2.CreateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.CreateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + api = client._instance_admin_client = _make_instance_admin_client() + api.common_location_path.return_value = LOCATION + api.instance_path.return_value = instance.name + api.create_cluster.return_value = response_pb + + cluster.create() + + expected_request_cluster = instance_v2_pb2.Cluster( + location=LOCATION, + serve_nodes=cluster.serve_nodes, + default_storage_type=cluster.default_storage_type, + ) + expected_request = { + "parent": instance.name, + "cluster_id": CLUSTER_ID, + "cluster": expected_request_cluster, + } + api.create_cluster.assert_called_once_with(request=expected_request) + + +def test_cluster_create_w_cmek(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + LOCATION = LOCATION_PATH + LOCATION_ID + instance = Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + serve_nodes=SERVE_NODES, + default_storage_type=STORAGE_TYPE_SSD, + kms_key_name=KMS_KEY_NAME, + ) + name = instance.name + metadata = messages_v2_pb2.CreateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.CreateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + api = client._instance_admin_client = _make_instance_admin_client() + api.common_location_path.return_value = LOCATION + api.instance_path.return_value = name + api.create_cluster.return_value = response_pb + + cluster.create() + + expected_request_cluster = instance_v2_pb2.Cluster( + location=LOCATION, + serve_nodes=cluster.serve_nodes, + default_storage_type=cluster.default_storage_type, + encryption_config=instance_v2_pb2.Cluster.EncryptionConfig( + kms_key_name=KMS_KEY_NAME, + ), + ) + expected_request = { + "parent": instance.name, + "cluster_id": CLUSTER_ID, + "cluster": expected_request_cluster, + } + api.create_cluster.assert_called_once_with(request=expected_request) + + +def test_cluster_update(): + import datetime + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable.enums import StorageType + + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, 
admin=True) + STORAGE_TYPE_SSD = StorageType.SSD + instance = _Instance(INSTANCE_ID, client) + cluster = _make_cluster( + CLUSTER_ID, + instance, + location_id=LOCATION_ID, + serve_nodes=SERVE_NODES, + default_storage_type=STORAGE_TYPE_SSD, + ) + metadata = messages_v2_pb2.UpdateClusterMetadata(request_time=NOW_PB) + type_url = "type.googleapis.com/{}".format( + messages_v2_pb2.UpdateClusterMetadata._meta._pb.DESCRIPTOR.full_name + ) + response_pb = operations_pb2.Operation( + name=OP_NAME, + metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()), + ) + + api = client._instance_admin_client = _make_instance_admin_client() + api.cluster_path.return_value = ( + "projects/project/instances/instance-id/clusters/cluster-id" + ) + api.update_cluster.return_value = response_pb + + cluster.update() + + expected_request = { + "name": "projects/project/instances/instance-id/clusters/cluster-id", + "serve_nodes": 5, + "location": None, + } + api.update_cluster.assert_called_once_with(request=expected_request) + + +def test_cluster_delete(): + from google.protobuf import empty_pb2 + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = _Instance(INSTANCE_ID, client) + cluster = _make_cluster(CLUSTER_ID, instance, LOCATION_ID) + + api = client._instance_admin_client = _make_instance_admin_client() + api.delete_cluster.side_effect = [empty_pb2.Empty()] + + # Perform the method and check the result. + assert cluster.delete() is None + + api.delete_cluster.assert_called_once_with(request={"name": cluster.name}) class _Instance(object): From 50a94b888204fa9d9dbc8029fb48259acc10eb85 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 16:02:42 -0400 Subject: [PATCH 06/16] tests: refactor 'column_family' unit tests w/ pytest idioms --- tests/unit/test_column_family.py | 1184 +++++++++++++++--------------- 1 file changed, 591 insertions(+), 593 deletions(-) diff --git a/tests/unit/test_column_family.py b/tests/unit/test_column_family.py index 601c37cf5..9d4632e2a 100644 --- a/tests/unit/test_column_family.py +++ b/tests/unit/test_column_family.py @@ -12,609 +12,607 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest - import mock +import pytest from ._testing import _make_credentials -class TestMaxVersionsGCRule(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - return MaxVersionsGCRule - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test___eq__type_differ(self): - gc_rule1 = self._make_one(10) - self.assertNotEqual(gc_rule1, object()) - self.assertEqual(gc_rule1, mock.ANY) - - def test___eq__same_value(self): - gc_rule1 = self._make_one(2) - gc_rule2 = self._make_one(2) - self.assertEqual(gc_rule1, gc_rule2) - - def test___ne__same_value(self): - gc_rule1 = self._make_one(99) - gc_rule2 = self._make_one(99) - comparison_val = gc_rule1 != gc_rule2 - self.assertFalse(comparison_val) - - def test_to_pb(self): - max_num_versions = 1337 - gc_rule = self._make_one(max_num_versions=max_num_versions) - pb_val = gc_rule.to_pb() - expected = _GcRulePB(max_num_versions=max_num_versions) - self.assertEqual(pb_val, expected) - - -class TestMaxAgeGCRule(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.column_family import MaxAgeGCRule - - return MaxAgeGCRule - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test___eq__type_differ(self): - max_age = object() - gc_rule1 = self._make_one(max_age=max_age) - gc_rule2 = object() - self.assertNotEqual(gc_rule1, gc_rule2) - - def test___eq__same_value(self): - max_age = object() - gc_rule1 = self._make_one(max_age=max_age) - gc_rule2 = self._make_one(max_age=max_age) - self.assertEqual(gc_rule1, gc_rule2) +def _make_max_versions_gc_rule(*args, **kwargs): + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + return MaxVersionsGCRule(*args, **kwargs) + + +def test_max_versions_gc_rule___eq__type_differ(): + gc_rule1 = _make_max_versions_gc_rule(10) + assert gc_rule1 != object() + assert gc_rule1 == mock.ANY + + +def test_max_versions_gc_rule___eq__same_value(): + gc_rule1 = _make_max_versions_gc_rule(2) + gc_rule2 = _make_max_versions_gc_rule(2) + assert gc_rule1 == gc_rule2 + + +def test_max_versions_gc_rule___ne__same_value(): + gc_rule1 = _make_max_versions_gc_rule(99) + gc_rule2 = _make_max_versions_gc_rule(99) + assert not (gc_rule1 != gc_rule2) + + +def test_max_versions_gc_rule_to_pb(): + max_num_versions = 1337 + gc_rule = _make_max_versions_gc_rule(max_num_versions=max_num_versions) + pb_val = gc_rule.to_pb() + expected = _GcRulePB(max_num_versions=max_num_versions) + assert pb_val == expected + + +def _make_max_age_gc_rule(*args, **kwargs): + from google.cloud.bigtable.column_family import MaxAgeGCRule + + return MaxAgeGCRule(*args, **kwargs) + + +def test_max_age_gc_rule___eq__type_differ(): + max_age = object() + gc_rule1 = _make_max_age_gc_rule(max_age=max_age) + gc_rule2 = object() + assert gc_rule1 != gc_rule2 + + +def test_max_age_gc_rule___eq__same_value(): + max_age = object() + gc_rule1 = _make_max_age_gc_rule(max_age=max_age) + gc_rule2 = _make_max_age_gc_rule(max_age=max_age) + assert gc_rule1 == gc_rule2 + + +def test_max_age_gc_rule___ne__same_value(): + max_age = object() + gc_rule1 = _make_max_age_gc_rule(max_age=max_age) + gc_rule2 = _make_max_age_gc_rule(max_age=max_age) + assert not (gc_rule1 != gc_rule2) + + +def test_max_age_gc_rule_to_pb(): + import datetime + from google.protobuf import duration_pb2 + + max_age = datetime.timedelta(seconds=1) + duration = 
duration_pb2.Duration(seconds=1) + gc_rule = _make_max_age_gc_rule(max_age=max_age) + pb_val = gc_rule.to_pb() + assert pb_val == _GcRulePB(max_age=duration) + + +def _make_gc_rule_union(*args, **kwargs): + from google.cloud.bigtable.column_family import GCRuleUnion + + return GCRuleUnion(*args, **kwargs) + + +def test_gc_rule_union_constructor(): + rules = object() + rule_union = _make_gc_rule_union(rules) + assert rule_union.rules is rules + + +def test_gc_rule_union___eq__(): + rules = object() + gc_rule1 = _make_gc_rule_union(rules) + gc_rule2 = _make_gc_rule_union(rules) + assert gc_rule1 == gc_rule2 + + +def test_gc_rule_union___eq__type_differ(): + rules = object() + gc_rule1 = _make_gc_rule_union(rules) + gc_rule2 = object() + assert gc_rule1 != gc_rule2 + + +def test_gc_rule_union___ne__same_value(): + rules = object() + gc_rule1 = _make_gc_rule_union(rules) + gc_rule2 = _make_gc_rule_union(rules) + assert not (gc_rule1 != gc_rule2) + + +def test_gc_rule_union_to_pb(): + import datetime + from google.protobuf import duration_pb2 + from google.cloud.bigtable.column_family import MaxAgeGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions = 42 + rule1 = MaxVersionsGCRule(max_num_versions) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) + + rule3 = _make_gc_rule_union(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB(union=_GcRuleUnionPB(rules=[pb_rule1, pb_rule2])) + + gc_rule_pb = rule3.to_pb() + assert gc_rule_pb == pb_rule3 + + +def test_gc_rule_union_to_pb_nested(): + import datetime + from google.protobuf import duration_pb2 + from google.cloud.bigtable.column_family import MaxAgeGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions1 = 42 + rule1 = MaxVersionsGCRule(max_num_versions1) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions1) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) + + rule3 = _make_gc_rule_union(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB(union=_GcRuleUnionPB(rules=[pb_rule1, pb_rule2])) + + max_num_versions2 = 1337 + rule4 = MaxVersionsGCRule(max_num_versions2) + pb_rule4 = _GcRulePB(max_num_versions=max_num_versions2) + + rule5 = _make_gc_rule_union(rules=[rule3, rule4]) + pb_rule5 = _GcRulePB(union=_GcRuleUnionPB(rules=[pb_rule3, pb_rule4])) + + gc_rule_pb = rule5.to_pb() + assert gc_rule_pb == pb_rule5 + + +def _make_gc_rule_intersection(*args, **kwargs): + from google.cloud.bigtable.column_family import GCRuleIntersection + + return GCRuleIntersection(*args, **kwargs) + + +def test_gc_rule_intersection_constructor(): + rules = object() + rule_intersection = _make_gc_rule_intersection(rules) + assert rule_intersection.rules is rules + + +def test_gc_rule_intersection___eq__(): + rules = object() + gc_rule1 = _make_gc_rule_intersection(rules) + gc_rule2 = _make_gc_rule_intersection(rules) + assert gc_rule1 == gc_rule2 + + +def test_gc_rule_intersection___eq__type_differ(): + rules = object() + gc_rule1 = _make_gc_rule_intersection(rules) + gc_rule2 = object() + assert gc_rule1 != gc_rule2 + + +def test_gc_rule_intersection___ne__same_value(): + rules = object() + gc_rule1 = _make_gc_rule_intersection(rules) + gc_rule2 = _make_gc_rule_intersection(rules) + assert not (gc_rule1 != gc_rule2) + + +def test_gc_rule_intersection_to_pb(): 
+ import datetime + from google.protobuf import duration_pb2 + from google.cloud.bigtable.column_family import MaxAgeGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions = 42 + rule1 = MaxVersionsGCRule(max_num_versions) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) + + rule3 = _make_gc_rule_intersection(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB(intersection=_GcRuleIntersectionPB(rules=[pb_rule1, pb_rule2])) + + gc_rule_pb = rule3.to_pb() + assert gc_rule_pb == pb_rule3 + + +def test_gc_rule_intersection_to_pb_nested(): + import datetime + from google.protobuf import duration_pb2 + from google.cloud.bigtable.column_family import MaxAgeGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + max_num_versions1 = 42 + rule1 = MaxVersionsGCRule(max_num_versions1) + pb_rule1 = _GcRulePB(max_num_versions=max_num_versions1) + + max_age = datetime.timedelta(seconds=1) + rule2 = MaxAgeGCRule(max_age) + pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) + + rule3 = _make_gc_rule_intersection(rules=[rule1, rule2]) + pb_rule3 = _GcRulePB(intersection=_GcRuleIntersectionPB(rules=[pb_rule1, pb_rule2])) + + max_num_versions2 = 1337 + rule4 = MaxVersionsGCRule(max_num_versions2) + pb_rule4 = _GcRulePB(max_num_versions=max_num_versions2) + + rule5 = _make_gc_rule_intersection(rules=[rule3, rule4]) + pb_rule5 = _GcRulePB(intersection=_GcRuleIntersectionPB(rules=[pb_rule3, pb_rule4])) + + gc_rule_pb = rule5.to_pb() + assert gc_rule_pb == pb_rule5 + + +def _make_column_family(*args, **kwargs): + from google.cloud.bigtable.column_family import ColumnFamily + + return ColumnFamily(*args, **kwargs) + + +def _make_client(*args, **kwargs): + from google.cloud.bigtable.client import Client + + return Client(*args, **kwargs) + + +def test_column_family_constructor(): + column_family_id = u"column-family-id" + table = object() + gc_rule = object() + column_family = _make_column_family(column_family_id, table, gc_rule=gc_rule) + + assert column_family.column_family_id == column_family_id + assert column_family._table is table + assert column_family.gc_rule is gc_rule + + +def test_column_family_name_property(): + column_family_id = u"column-family-id" + table_name = "table_name" + table = _Table(table_name) + column_family = _make_column_family(column_family_id, table) + + expected_name = table_name + "/columnFamilies/" + column_family_id + assert column_family.name == expected_name + + +def test_column_family___eq__(): + column_family_id = "column_family_id" + table = object() + gc_rule = object() + column_family1 = _make_column_family(column_family_id, table, gc_rule=gc_rule) + column_family2 = _make_column_family(column_family_id, table, gc_rule=gc_rule) + assert column_family1 == column_family2 + + +def test_column_family___eq__type_differ(): + column_family1 = _make_column_family("column_family_id", None) + column_family2 = object() + assert column_family1 != column_family2 + + +def test_column_family___ne__same_value(): + column_family_id = "column_family_id" + table = object() + gc_rule = object() + column_family1 = _make_column_family(column_family_id, table, gc_rule=gc_rule) + column_family2 = _make_column_family(column_family_id, table, gc_rule=gc_rule) + assert not (column_family1 != column_family2) + + +def test_column_family___ne__(): + column_family1 = 
_make_column_family("column_family_id1", None) + column_family2 = _make_column_family("column_family_id2", None) + assert column_family1 != column_family2 + + +def test_column_family_to_pb_no_rules(): + column_family = _make_column_family("column_family_id", None) + pb_val = column_family.to_pb() + expected = _ColumnFamilyPB() + assert pb_val == expected + + +def test_column_family_to_pb_with_rule(): + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + gc_rule = MaxVersionsGCRule(1) + column_family = _make_column_family("column_family_id", None, gc_rule=gc_rule) + pb_val = column_family.to_pb() + expected = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) + assert pb_val == expected + + +def _create_test_helper(gc_rule=None): + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_admin_v2_pb2, + ) + from tests.unit._testing import _FakeStub + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BigtableTableAdminClient, + ) + + project_id = "project-id" + zone = "zone" + cluster_id = "cluster-id" + table_id = "table-id" + column_family_id = "column-family-id" + table_name = ( + "projects/" + + project_id + + "/zones/" + + zone + + "/clusters/" + + cluster_id + + "/tables/" + + table_id + ) + + api = mock.create_autospec(BigtableTableAdminClient) + + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client) + column_family = _make_column_family(column_family_id, table, gc_rule=gc_rule) + + # Create request_pb + if gc_rule is None: + column_family_pb = _ColumnFamilyPB() + else: + column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(name=table_name) + modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification() + modification.id = column_family_id + modification.create = column_family_pb + request_pb.modifications.append(modification) + + # Create response_pb + response_pb = _ColumnFamilyPB() + + # Patch the stub used by the API method. + stub = _FakeStub(response_pb) + client._table_admin_client = api + client._table_admin_client.transport.create = stub + + # Create expected_result. + expected_result = None # create() has no return value. + + # Perform the method and check the result. 
+ assert stub.results == (response_pb,) + result = column_family.create() + assert result == expected_result + + +def test_column_family_create(): + _create_test_helper(gc_rule=None) + + +def test_column_family_create_with_gc_rule(): + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + gc_rule = MaxVersionsGCRule(1337) + _create_test_helper(gc_rule=gc_rule) + + +def _update_test_helper(gc_rule=None): + from tests.unit._testing import _FakeStub + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_admin_v2_pb2, + ) + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BigtableTableAdminClient, + ) + + project_id = "project-id" + zone = "zone" + cluster_id = "cluster-id" + table_id = "table-id" + column_family_id = "column-family-id" + table_name = ( + "projects/" + + project_id + + "/zones/" + + zone + + "/clusters/" + + cluster_id + + "/tables/" + + table_id + ) + + api = mock.create_autospec(BigtableTableAdminClient) + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client) + column_family = _make_column_family(column_family_id, table, gc_rule=gc_rule) + + # Create request_pb + if gc_rule is None: + column_family_pb = _ColumnFamilyPB() + else: + column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(name=table_name) + modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification() + modification.id = column_family_id + modification.update = column_family_pb + request_pb.modifications.append(modification) + + # Create response_pb + response_pb = _ColumnFamilyPB() + + # Patch the stub used by the API method. + stub = _FakeStub(response_pb) + client._table_admin_client = api + client._table_admin_client.transport.update = stub + + # Create expected_result. + expected_result = None # update() has no return value. + + # Perform the method and check the result. 
+ assert stub.results == (response_pb,) + result = column_family.update() + assert result == expected_result + + +def test_column_family_update(): + _update_test_helper(gc_rule=None) + + +def test_column_family_update_with_gc_rule(): + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + gc_rule = MaxVersionsGCRule(1337) + _update_test_helper(gc_rule=gc_rule) + + +def test_column_family_delete(): + from google.protobuf import empty_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_admin_v2_pb2, + ) + from tests.unit._testing import _FakeStub + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BigtableTableAdminClient, + ) + + project_id = "project-id" + zone = "zone" + cluster_id = "cluster-id" + table_id = "table-id" + column_family_id = "column-family-id" + table_name = ( + "projects/" + + project_id + + "/zones/" + + zone + + "/clusters/" + + cluster_id + + "/tables/" + + table_id + ) + + api = mock.create_autospec(BigtableTableAdminClient) + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client) + column_family = _make_column_family(column_family_id, table) + + # Create request_pb + request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(name=table_name) + modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification( + id=column_family_id, drop=True + ) + request_pb.modifications.append(modification) + + # Create response_pb + response_pb = empty_pb2.Empty() + + # Patch the stub used by the API method. + stub = _FakeStub(response_pb) + client._table_admin_client = api + client._table_admin_client.transport.delete = stub + + # Create expected_result. + expected_result = None # delete() has no return value. + + # Perform the method and check the result. 
+ assert stub.results == (response_pb,) + result = column_family.delete() + assert result == expected_result + + +def test__gc_rule_from_pb_empty(): + from google.cloud.bigtable.column_family import _gc_rule_from_pb + + gc_rule_pb = _GcRulePB() + assert _gc_rule_from_pb(gc_rule_pb) is None + + +def test__gc_rule_from_pb_max_num_versions(): + from google.cloud.bigtable.column_family import _gc_rule_from_pb + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + orig_rule = MaxVersionsGCRule(1) + gc_rule_pb = orig_rule.to_pb() + result = _gc_rule_from_pb(gc_rule_pb) + assert isinstance(result, MaxVersionsGCRule) + assert result == orig_rule + + +def test__gc_rule_from_pb_max_age(): + import datetime + from google.cloud.bigtable.column_family import _gc_rule_from_pb + from google.cloud.bigtable.column_family import MaxAgeGCRule + + orig_rule = MaxAgeGCRule(datetime.timedelta(seconds=1)) + gc_rule_pb = orig_rule.to_pb() + result = _gc_rule_from_pb(gc_rule_pb) + assert isinstance(result, MaxAgeGCRule) + assert result == orig_rule + + +def test__gc_rule_from_pb_union(): + import datetime + from google.cloud.bigtable.column_family import _gc_rule_from_pb + from google.cloud.bigtable.column_family import GCRuleUnion + from google.cloud.bigtable.column_family import MaxAgeGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + rule1 = MaxVersionsGCRule(1) + rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) + orig_rule = GCRuleUnion([rule1, rule2]) + gc_rule_pb = orig_rule.to_pb() + result = _gc_rule_from_pb(gc_rule_pb) + assert isinstance(result, GCRuleUnion) + assert result == orig_rule + + +def test__gc_rule_from_pb_intersection(): + import datetime + from google.cloud.bigtable.column_family import _gc_rule_from_pb + from google.cloud.bigtable.column_family import GCRuleIntersection + from google.cloud.bigtable.column_family import MaxAgeGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule + + rule1 = MaxVersionsGCRule(1) + rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) + orig_rule = GCRuleIntersection([rule1, rule2]) + gc_rule_pb = orig_rule.to_pb() + result = _gc_rule_from_pb(gc_rule_pb) + assert isinstance(result, GCRuleIntersection) + assert result == orig_rule + + +def test__gc_rule_from_pb_unknown_field_name(): + from google.cloud.bigtable.column_family import _gc_rule_from_pb + + class MockProto(object): + + names = [] + + _pb = {} - def test___ne__same_value(self): - max_age = object() - gc_rule1 = self._make_one(max_age=max_age) - gc_rule2 = self._make_one(max_age=max_age) - comparison_val = gc_rule1 != gc_rule2 - self.assertFalse(comparison_val) + @classmethod + def WhichOneof(cls, name): + cls.names.append(name) + return "unknown" - def test_to_pb(self): - import datetime - from google.protobuf import duration_pb2 + MockProto._pb = MockProto - max_age = datetime.timedelta(seconds=1) - duration = duration_pb2.Duration(seconds=1) - gc_rule = self._make_one(max_age=max_age) - pb_val = gc_rule.to_pb() - self.assertEqual(pb_val, _GcRulePB(max_age=duration)) + assert MockProto.names == [] + with pytest.raises(ValueError): + _gc_rule_from_pb(MockProto) -class TestGCRuleUnion(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.column_family import GCRuleUnion - - return GCRuleUnion - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - rules = object() - rule_union = self._make_one(rules) - 
self.assertIs(rule_union.rules, rules) - - def test___eq__(self): - rules = object() - gc_rule1 = self._make_one(rules) - gc_rule2 = self._make_one(rules) - self.assertEqual(gc_rule1, gc_rule2) - - def test___eq__type_differ(self): - rules = object() - gc_rule1 = self._make_one(rules) - gc_rule2 = object() - self.assertNotEqual(gc_rule1, gc_rule2) - - def test___ne__same_value(self): - rules = object() - gc_rule1 = self._make_one(rules) - gc_rule2 = self._make_one(rules) - comparison_val = gc_rule1 != gc_rule2 - self.assertFalse(comparison_val) - - def test_to_pb(self): - import datetime - from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - max_num_versions = 42 - rule1 = MaxVersionsGCRule(max_num_versions) - pb_rule1 = _GcRulePB(max_num_versions=max_num_versions) - - max_age = datetime.timedelta(seconds=1) - rule2 = MaxAgeGCRule(max_age) - pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) - - rule3 = self._make_one(rules=[rule1, rule2]) - pb_rule3 = _GcRulePB(union=_GcRuleUnionPB(rules=[pb_rule1, pb_rule2])) - - gc_rule_pb = rule3.to_pb() - self.assertEqual(gc_rule_pb, pb_rule3) - - def test_to_pb_nested(self): - import datetime - from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - max_num_versions1 = 42 - rule1 = MaxVersionsGCRule(max_num_versions1) - pb_rule1 = _GcRulePB(max_num_versions=max_num_versions1) - - max_age = datetime.timedelta(seconds=1) - rule2 = MaxAgeGCRule(max_age) - pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) - - rule3 = self._make_one(rules=[rule1, rule2]) - pb_rule3 = _GcRulePB(union=_GcRuleUnionPB(rules=[pb_rule1, pb_rule2])) - - max_num_versions2 = 1337 - rule4 = MaxVersionsGCRule(max_num_versions2) - pb_rule4 = _GcRulePB(max_num_versions=max_num_versions2) - - rule5 = self._make_one(rules=[rule3, rule4]) - pb_rule5 = _GcRulePB(union=_GcRuleUnionPB(rules=[pb_rule3, pb_rule4])) - - gc_rule_pb = rule5.to_pb() - self.assertEqual(gc_rule_pb, pb_rule5) - - -class TestGCRuleIntersection(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.column_family import GCRuleIntersection - - return GCRuleIntersection - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - rules = object() - rule_intersection = self._make_one(rules) - self.assertIs(rule_intersection.rules, rules) - - def test___eq__(self): - rules = object() - gc_rule1 = self._make_one(rules) - gc_rule2 = self._make_one(rules) - self.assertEqual(gc_rule1, gc_rule2) - - def test___eq__type_differ(self): - rules = object() - gc_rule1 = self._make_one(rules) - gc_rule2 = object() - self.assertNotEqual(gc_rule1, gc_rule2) - - def test___ne__same_value(self): - rules = object() - gc_rule1 = self._make_one(rules) - gc_rule2 = self._make_one(rules) - comparison_val = gc_rule1 != gc_rule2 - self.assertFalse(comparison_val) - - def test_to_pb(self): - import datetime - from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - max_num_versions = 42 - rule1 = MaxVersionsGCRule(max_num_versions) - pb_rule1 = _GcRulePB(max_num_versions=max_num_versions) - - max_age = datetime.timedelta(seconds=1) - rule2 = MaxAgeGCRule(max_age) - 
pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) - - rule3 = self._make_one(rules=[rule1, rule2]) - pb_rule3 = _GcRulePB( - intersection=_GcRuleIntersectionPB(rules=[pb_rule1, pb_rule2]) - ) - - gc_rule_pb = rule3.to_pb() - self.assertEqual(gc_rule_pb, pb_rule3) - - def test_to_pb_nested(self): - import datetime - from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - max_num_versions1 = 42 - rule1 = MaxVersionsGCRule(max_num_versions1) - pb_rule1 = _GcRulePB(max_num_versions=max_num_versions1) - - max_age = datetime.timedelta(seconds=1) - rule2 = MaxAgeGCRule(max_age) - pb_rule2 = _GcRulePB(max_age=duration_pb2.Duration(seconds=1)) - - rule3 = self._make_one(rules=[rule1, rule2]) - pb_rule3 = _GcRulePB( - intersection=_GcRuleIntersectionPB(rules=[pb_rule1, pb_rule2]) - ) - - max_num_versions2 = 1337 - rule4 = MaxVersionsGCRule(max_num_versions2) - pb_rule4 = _GcRulePB(max_num_versions=max_num_versions2) - - rule5 = self._make_one(rules=[rule3, rule4]) - pb_rule5 = _GcRulePB( - intersection=_GcRuleIntersectionPB(rules=[pb_rule3, pb_rule4]) - ) - - gc_rule_pb = rule5.to_pb() - self.assertEqual(gc_rule_pb, pb_rule5) - - -class TestColumnFamily(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.column_family import ColumnFamily - - return ColumnFamily - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client - - return Client - - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - - def test_constructor(self): - column_family_id = u"column-family-id" - table = object() - gc_rule = object() - column_family = self._make_one(column_family_id, table, gc_rule=gc_rule) - - self.assertEqual(column_family.column_family_id, column_family_id) - self.assertIs(column_family._table, table) - self.assertIs(column_family.gc_rule, gc_rule) - - def test_name_property(self): - column_family_id = u"column-family-id" - table_name = "table_name" - table = _Table(table_name) - column_family = self._make_one(column_family_id, table) - - expected_name = table_name + "/columnFamilies/" + column_family_id - self.assertEqual(column_family.name, expected_name) - - def test___eq__(self): - column_family_id = "column_family_id" - table = object() - gc_rule = object() - column_family1 = self._make_one(column_family_id, table, gc_rule=gc_rule) - column_family2 = self._make_one(column_family_id, table, gc_rule=gc_rule) - self.assertEqual(column_family1, column_family2) - - def test___eq__type_differ(self): - column_family1 = self._make_one("column_family_id", None) - column_family2 = object() - self.assertNotEqual(column_family1, column_family2) - - def test___ne__same_value(self): - column_family_id = "column_family_id" - table = object() - gc_rule = object() - column_family1 = self._make_one(column_family_id, table, gc_rule=gc_rule) - column_family2 = self._make_one(column_family_id, table, gc_rule=gc_rule) - comparison_val = column_family1 != column_family2 - self.assertFalse(comparison_val) - - def test___ne__(self): - column_family1 = self._make_one("column_family_id1", None) - column_family2 = self._make_one("column_family_id2", None) - self.assertNotEqual(column_family1, column_family2) - - def test_to_pb_no_rules(self): - column_family = 
self._make_one("column_family_id", None) - pb_val = column_family.to_pb() - expected = _ColumnFamilyPB() - self.assertEqual(pb_val, expected) - - def test_to_pb_with_rule(self): - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - gc_rule = MaxVersionsGCRule(1) - column_family = self._make_one("column_family_id", None, gc_rule=gc_rule) - pb_val = column_family.to_pb() - expected = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) - self.assertEqual(pb_val, expected) - - def _create_test_helper(self, gc_rule=None): - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin as table_admin_v2_pb2, - ) - from tests.unit._testing import _FakeStub - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - - project_id = "project-id" - zone = "zone" - cluster_id = "cluster-id" - table_id = "table-id" - column_family_id = "column-family-id" - table_name = ( - "projects/" - + project_id - + "/zones/" - + zone - + "/clusters/" - + cluster_id - + "/tables/" - + table_id - ) - - api = mock.create_autospec(BigtableTableAdminClient) - - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client) - column_family = self._make_one(column_family_id, table, gc_rule=gc_rule) - - # Create request_pb - if gc_rule is None: - column_family_pb = _ColumnFamilyPB() - else: - column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) - request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(name=table_name) - modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification() - modification.id = column_family_id - modification.create = column_family_pb - request_pb.modifications.append(modification) - - # Create response_pb - response_pb = _ColumnFamilyPB() - - # Patch the stub used by the API method. - stub = _FakeStub(response_pb) - client._table_admin_client = api - client._table_admin_client.transport.create = stub - - # Create expected_result. - expected_result = None # create() has no return value. - - # Perform the method and check the result. 
- self.assertEqual(stub.results, (response_pb,)) - result = column_family.create() - self.assertEqual(result, expected_result) - - def test_create(self): - self._create_test_helper(gc_rule=None) - - def test_create_with_gc_rule(self): - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - gc_rule = MaxVersionsGCRule(1337) - self._create_test_helper(gc_rule=gc_rule) - - def _update_test_helper(self, gc_rule=None): - from tests.unit._testing import _FakeStub - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin as table_admin_v2_pb2, - ) - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - - project_id = "project-id" - zone = "zone" - cluster_id = "cluster-id" - table_id = "table-id" - column_family_id = "column-family-id" - table_name = ( - "projects/" - + project_id - + "/zones/" - + zone - + "/clusters/" - + cluster_id - + "/tables/" - + table_id - ) - - api = mock.create_autospec(BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client) - column_family = self._make_one(column_family_id, table, gc_rule=gc_rule) - - # Create request_pb - if gc_rule is None: - column_family_pb = _ColumnFamilyPB() - else: - column_family_pb = _ColumnFamilyPB(gc_rule=gc_rule.to_pb()) - request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(name=table_name) - modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification() - modification.id = column_family_id - modification.update = column_family_pb - request_pb.modifications.append(modification) - - # Create response_pb - response_pb = _ColumnFamilyPB() - - # Patch the stub used by the API method. - stub = _FakeStub(response_pb) - client._table_admin_client = api - client._table_admin_client.transport.update = stub - - # Create expected_result. - expected_result = None # update() has no return value. - - # Perform the method and check the result. 
- self.assertEqual(stub.results, (response_pb,)) - result = column_family.update() - self.assertEqual(result, expected_result) - - def test_update(self): - self._update_test_helper(gc_rule=None) - - def test_update_with_gc_rule(self): - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - gc_rule = MaxVersionsGCRule(1337) - self._update_test_helper(gc_rule=gc_rule) - - def test_delete(self): - from google.protobuf import empty_pb2 - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin as table_admin_v2_pb2, - ) - from tests.unit._testing import _FakeStub - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - - project_id = "project-id" - zone = "zone" - cluster_id = "cluster-id" - table_id = "table-id" - column_family_id = "column-family-id" - table_name = ( - "projects/" - + project_id - + "/zones/" - + zone - + "/clusters/" - + cluster_id - + "/tables/" - + table_id - ) - - api = mock.create_autospec(BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client) - column_family = self._make_one(column_family_id, table) - - # Create request_pb - request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(name=table_name) - modification = table_admin_v2_pb2.ModifyColumnFamiliesRequest.Modification( - id=column_family_id, drop=True - ) - request_pb.modifications.append(modification) - - # Create response_pb - response_pb = empty_pb2.Empty() - - # Patch the stub used by the API method. - stub = _FakeStub(response_pb) - client._table_admin_client = api - client._table_admin_client.transport.delete = stub - - # Create expected_result. - expected_result = None # delete() has no return value. - - # Perform the method and check the result. 
- self.assertEqual(stub.results, (response_pb,)) - result = column_family.delete() - self.assertEqual(result, expected_result) - - -class Test__gc_rule_from_pb(unittest.TestCase): - def _call_fut(self, *args, **kwargs): - from google.cloud.bigtable.column_family import _gc_rule_from_pb - - return _gc_rule_from_pb(*args, **kwargs) - - def test_empty(self): - - gc_rule_pb = _GcRulePB() - self.assertIsNone(self._call_fut(gc_rule_pb)) - - def test_max_num_versions(self): - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - orig_rule = MaxVersionsGCRule(1) - gc_rule_pb = orig_rule.to_pb() - result = self._call_fut(gc_rule_pb) - self.assertIsInstance(result, MaxVersionsGCRule) - self.assertEqual(result, orig_rule) - - def test_max_age(self): - import datetime - from google.cloud.bigtable.column_family import MaxAgeGCRule - - orig_rule = MaxAgeGCRule(datetime.timedelta(seconds=1)) - gc_rule_pb = orig_rule.to_pb() - result = self._call_fut(gc_rule_pb) - self.assertIsInstance(result, MaxAgeGCRule) - self.assertEqual(result, orig_rule) - - def test_union(self): - import datetime - from google.cloud.bigtable.column_family import GCRuleUnion - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - rule1 = MaxVersionsGCRule(1) - rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) - orig_rule = GCRuleUnion([rule1, rule2]) - gc_rule_pb = orig_rule.to_pb() - result = self._call_fut(gc_rule_pb) - self.assertIsInstance(result, GCRuleUnion) - self.assertEqual(result, orig_rule) - - def test_intersection(self): - import datetime - from google.cloud.bigtable.column_family import GCRuleIntersection - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule - - rule1 = MaxVersionsGCRule(1) - rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) - orig_rule = GCRuleIntersection([rule1, rule2]) - gc_rule_pb = orig_rule.to_pb() - result = self._call_fut(gc_rule_pb) - self.assertIsInstance(result, GCRuleIntersection) - self.assertEqual(result, orig_rule) - - def test_unknown_field_name(self): - class MockProto(object): - - names = [] - - _pb = {} - - @classmethod - def WhichOneof(cls, name): - cls.names.append(name) - return "unknown" - - MockProto._pb = MockProto - - self.assertEqual(MockProto.names, []) - self.assertRaises(ValueError, self._call_fut, MockProto) - self.assertEqual(MockProto.names, ["rule"]) + assert MockProto.names == ["rule"] def _GcRulePB(*args, **kw): From db0f7c7aae9276c1aafa874a8c8afefa83aafc0d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 16:09:45 -0400 Subject: [PATCH 07/16] tests: refactor 'encryption_info' unit tests w/ pytest idioms --- tests/unit/test_encryption_info.py | 220 +++++++++++++++-------------- 1 file changed, 112 insertions(+), 108 deletions(-) diff --git a/tests/unit/test_encryption_info.py b/tests/unit/test_encryption_info.py index ede6f4883..8b92a83ed 100644 --- a/tests/unit/test_encryption_info.py +++ b/tests/unit/test_encryption_info.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest
-
 import mock
 
 from google.cloud.bigtable import enums
@@ -55,113 +53,119 @@ def _make_info_pb(
     )
 
 
-class TestEncryptionInfo(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.encryption_info import EncryptionInfo
+def _make_encryption_info(*args, **kwargs):
+    from google.cloud.bigtable.encryption_info import EncryptionInfo
+
+    return EncryptionInfo(*args, **kwargs)
+
+
+def _make_encryption_info_defaults(
+    encryption_type=EncryptionType.GOOGLE_DEFAULT_ENCRYPTION,
+    code=_STATUS_CODE,
+    message=_STATUS_MESSAGE,
+    kms_key_version=_KMS_KEY_VERSION,
+):
+    encryption_status = _make_status(code=code, message=message)
+    return _make_encryption_info(encryption_type, encryption_status, kms_key_version)
+
+
+def test_encryption_info__from_pb():
+    from google.cloud.bigtable.encryption_info import EncryptionInfo
 
-        return EncryptionInfo
+    info_pb = _make_info_pb()
 
-    def _make_one(self, encryption_type, encryption_status, kms_key_version):
-        return self._get_target_class()(
-            encryption_type, encryption_status, kms_key_version,
-        )
+    info = EncryptionInfo._from_pb(info_pb)
 
-    def _make_one_defaults(
-        self,
-        encryption_type=EncryptionType.GOOGLE_DEFAULT_ENCRYPTION,
-        code=_STATUS_CODE,
-        message=_STATUS_MESSAGE,
+    assert info.encryption_type == EncryptionType.GOOGLE_DEFAULT_ENCRYPTION
+    assert info.encryption_status.code == _STATUS_CODE
+    assert info.encryption_status.message == _STATUS_MESSAGE
+    assert info.kms_key_version == _KMS_KEY_VERSION
+
+
+def test_encryption_info_ctor():
+    encryption_type = EncryptionType.GOOGLE_DEFAULT_ENCRYPTION
+    encryption_status = _make_status()
+
+    info = _make_encryption_info(
+        encryption_type=encryption_type,
+        encryption_status=encryption_status,
         kms_key_version=_KMS_KEY_VERSION,
-    ):
-        encryption_status = _make_status(code=code, message=message)
-        return self._make_one(encryption_type, encryption_status, kms_key_version)
-
-    def test__from_pb(self):
-        klass = self._get_target_class()
-        info_pb = _make_info_pb()
-
-        info = klass._from_pb(info_pb)
-
-        self.assertEqual(
-            info.encryption_type, EncryptionType.GOOGLE_DEFAULT_ENCRYPTION,
-        )
-        self.assertEqual(info.encryption_status.code, _STATUS_CODE)
-        self.assertEqual(info.encryption_status.message, _STATUS_MESSAGE)
-        self.assertEqual(info.kms_key_version, _KMS_KEY_VERSION)
-
-    def test_ctor(self):
-        encryption_type = EncryptionType.GOOGLE_DEFAULT_ENCRYPTION
-        encryption_status = _make_status()
-
-        info = self._make_one(
-            encryption_type=encryption_type,
-            encryption_status=encryption_status,
-            kms_key_version=_KMS_KEY_VERSION,
-        )
-
-        self.assertEqual(info.encryption_type, encryption_type)
-        self.assertEqual(info.encryption_status, encryption_status)
-        self.assertEqual(info.kms_key_version, _KMS_KEY_VERSION)
-
-    def test___eq___identity(self):
-        info = self._make_one_defaults()
-        self.assertTrue(info == info)
-
-    def test___eq___wrong_type(self):
-        info = self._make_one_defaults()
-        other = object()
-        self.assertFalse(info == other)
-
-    def test___eq___same_values(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults()
-        self.assertTrue(info == other)
-
-    def test___eq___different_encryption_type(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults(
-            encryption_type=EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
-        )
-        self.assertFalse(info == other)
-
-    def test___eq___different_encryption_status(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults(code=456)
-        self.assertFalse(info == other)
-
-    def test___eq___different_kms_key_version(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults(kms_key_version=789)
-        self.assertFalse(info == other)
-
-    def test___ne___identity(self):
-        info = self._make_one_defaults()
-        self.assertFalse(info != info)
-
-    def test___ne___wrong_type(self):
-        info = self._make_one_defaults()
-        other = object()
-        self.assertTrue(info != other)
-
-    def test___ne___same_values(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults()
-        self.assertFalse(info != other)
-
-    def test___ne___different_encryption_type(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults(
-            encryption_type=EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
-        )
-        self.assertTrue(info != other)
-
-    def test___ne___different_encryption_status(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults(code=456)
-        self.assertTrue(info != other)
-
-    def test___ne___different_kms_key_version(self):
-        info = self._make_one_defaults()
-        other = self._make_one_defaults(kms_key_version=789)
-        self.assertTrue(info != other)
+    )
+
+    assert info.encryption_type == encryption_type
+    assert info.encryption_status == encryption_status
+    assert info.kms_key_version == _KMS_KEY_VERSION
+
+
+def test_encryption_info___eq___identity():
+    info = _make_encryption_info_defaults()
+    assert info == info
+
+
+def test_encryption_info___eq___wrong_type():
+    info = _make_encryption_info_defaults()
+    other = object()
+    assert not (info == other)
+
+
+def test_encryption_info___eq___same_values():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults()
+    assert info == other
+
+
+def test_encryption_info___eq___different_encryption_type():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults(
+        encryption_type=EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
+    )
+    assert not (info == other)
+
+
+def test_encryption_info___eq___different_encryption_status():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults(code=456)
+    assert not (info == other)
+
+
+def test_encryption_info___eq___different_kms_key_version():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults(kms_key_version=789)
+    assert not (info == other)
+
+
+def test_encryption_info___ne___identity():
+    info = _make_encryption_info_defaults()
+    assert not (info != info)
+
+
+def test_encryption_info___ne___wrong_type():
+    info = _make_encryption_info_defaults()
+    other = object()
+    assert info != other
+
+
+def test_encryption_info___ne___same_values():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults()
+    assert not (info != other)
+
+
+def test_encryption_info___ne___different_encryption_type():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults(
+        encryption_type=EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
+    )
+    assert info != other
+
+
+def test_encryption_info___ne___different_encryption_status():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults(code=456)
+    assert info != other
+
+
+def test_encryption_info___ne___different_kms_key_version():
+    info = _make_encryption_info_defaults()
+    other = _make_encryption_info_defaults(kms_key_version=789)
+    assert info != other

From 3451cb6bb88b4b52dae012fa0233fbf44ef54a00 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 20 Oct 2021 16:14:37 -0400
Subject: [PATCH 08/16] tests: refactor 'error' unit tests w/ pytest idioms

---
 tests/unit/test_error.py | 170 ++++++++++++++++++++-------------------
 1 file changed, 87 insertions(+), 83 deletions(-)

diff --git a/tests/unit/test_error.py b/tests/unit/test_error.py
index c53d63991..8b148473c 100644
--- a/tests/unit/test_error.py
+++ b/tests/unit/test_error.py
@@ -12,86 +12,90 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
-
-class TestStatus(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.error import Status
-
-        return Status
-
-    @staticmethod
-    def _make_status_pb(**kwargs):
-        from google.rpc.status_pb2 import Status
-
-        return Status(**kwargs)
-
-    def _make_one(self, status_pb):
-        return self._get_target_class()(status_pb)
-
-    def test_ctor(self):
-        status_pb = self._make_status_pb()
-        status = self._make_one(status_pb)
-        self.assertIs(status.status_pb, status_pb)
-
-    def test_code(self):
-        code = 123
-        status_pb = self._make_status_pb(code=code)
-        status = self._make_one(status_pb)
-        self.assertEqual(status.code, code)
-
-    def test_message(self):
-        message = "message"
-        status_pb = self._make_status_pb(message=message)
-        status = self._make_one(status_pb)
-        self.assertEqual(status.message, message)
-
-    def test___eq___self(self):
-        status_pb = self._make_status_pb()
-        status = self._make_one(status_pb)
-        self.assertTrue(status == status)
-
-    def test___eq___other_hit(self):
-        status_pb = self._make_status_pb(code=123, message="message")
-        status = self._make_one(status_pb)
-        other = self._make_one(status_pb)
-        self.assertTrue(status == other)
-
-    def test___eq___other_miss(self):
-        status_pb = self._make_status_pb(code=123, message="message")
-        other_status_pb = self._make_status_pb(code=456, message="oops")
-        status = self._make_one(status_pb)
-        other = self._make_one(other_status_pb)
-        self.assertFalse(status == other)
-
-    def test___eq___wrong_type(self):
-        status_pb = self._make_status_pb(code=123, message="message")
-        status = self._make_one(status_pb)
-        other = object()
-        self.assertFalse(status == other)
-
-    def test___ne___self(self):
-        status_pb = self._make_status_pb()
-        status = self._make_one(status_pb)
-        self.assertFalse(status != status)
-
-    def test___ne___other_hit(self):
-        status_pb = self._make_status_pb(code=123, message="message")
-        status = self._make_one(status_pb)
-        other = self._make_one(status_pb)
-        self.assertFalse(status != other)
-
-    def test___ne___other_miss(self):
-        status_pb = self._make_status_pb(code=123, message="message")
-        other_status_pb = self._make_status_pb(code=456, message="oops")
-        status = self._make_one(status_pb)
-        other = self._make_one(other_status_pb)
-        self.assertTrue(status != other)
-
-    def test___ne___wrong_type(self):
-        status_pb = self._make_status_pb(code=123, message="message")
-        status = self._make_one(status_pb)
-        other = object()
-        self.assertTrue(status != other)
+
+def _make_status_pb(**kwargs):
+    from google.rpc.status_pb2 import Status
+
+    return Status(**kwargs)
+
+
+def _make_status(status_pb):
+    from google.cloud.bigtable.error import Status
+
+    return Status(status_pb)
+
+
+def test_status_ctor():
+    status_pb = _make_status_pb()
+    status = _make_status(status_pb)
+    assert status.status_pb is status_pb
+
+
+def test_status_code():
+    code = 123
+    status_pb = _make_status_pb(code=code)
+    status = _make_status(status_pb)
+    assert status.code == code
+
+
+def test_status_message():
+    message = "message"
+    status_pb = _make_status_pb(message=message)
+    status = _make_status(status_pb)
+    assert status.message == message
+
+
+def test_status___eq___self():
+    status_pb = _make_status_pb()
+    status = _make_status(status_pb)
+    assert status == status
+
+
+def test_status___eq___other_hit():
+    status_pb = _make_status_pb(code=123, message="message")
+    status = _make_status(status_pb)
+    other = _make_status(status_pb)
+    assert status == other
+
+
+def test_status___eq___other_miss():
+    status_pb = _make_status_pb(code=123, message="message")
+    other_status_pb = _make_status_pb(code=456, message="oops")
+    status = _make_status(status_pb)
+    other = _make_status(other_status_pb)
+    assert not (status == other)
+
+
+def test_status___eq___wrong_type():
+    status_pb = _make_status_pb(code=123, message="message")
+    status = _make_status(status_pb)
+    other = object()
+    assert not (status == other)
+
+
+def test_status___ne___self():
+    status_pb = _make_status_pb()
+    status = _make_status(status_pb)
+    assert not (status != status)
+
+
+def test_status___ne___other_hit():
+    status_pb = _make_status_pb(code=123, message="message")
+    status = _make_status(status_pb)
+    other = _make_status(status_pb)
+    assert not (status != other)
+
+
+def test_status___ne___other_miss():
+    status_pb = _make_status_pb(code=123, message="message")
+    other_status_pb = _make_status_pb(code=456, message="oops")
+    status = _make_status(status_pb)
+    other = _make_status(other_status_pb)
+    assert status != other
+
+
+def test_status___ne___wrong_type():
+    status_pb = _make_status_pb(code=123, message="message")
+    status = _make_status(status_pb)
+    other = object()
+    assert status != other

From c6dd6b0a93b215c2755e5422f5ee1f28ed4f59d3 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 20 Oct 2021 17:21:29 -0400
Subject: [PATCH 09/16] tests: refactor 'instance' unit tests w/ pytest idioms

---
 tests/unit/test_instance.py | 1905 +++++++++++++++++------------------
 1 file changed, 905 insertions(+), 1000 deletions(-)

diff --git a/tests/unit/test_instance.py b/tests/unit/test_instance.py
index e493fd9c8..def7e3e38 100644
--- a/tests/unit/test_instance.py
+++ b/tests/unit/test_instance.py
@@ -13,1014 +13,919 @@
 # limitations under the License.
 
-import unittest
-
 import mock
+import pytest
 
 from ._testing import _make_credentials
 from google.cloud.bigtable.cluster import Cluster
 
+PROJECT = "project"
+INSTANCE_ID = "instance-id"
+INSTANCE_NAME = "projects/" + PROJECT + "/instances/" + INSTANCE_ID
+LOCATION_ID = "locid"
+LOCATION = "projects/" + PROJECT + "/locations/" + LOCATION_ID
+APP_PROFILE_PATH = "projects/" + PROJECT + "/instances/" + INSTANCE_ID + "/appProfiles/"
+DISPLAY_NAME = "display_name"
+LABELS = {"foo": "bar"}
+OP_ID = 8915
+OP_NAME = "operations/projects/{}/instances/{}operations/{}".format(
+    PROJECT, INSTANCE_ID, OP_ID
+)
+TABLE_ID = "table_id"
+TABLE_NAME = INSTANCE_NAME + "/tables/" + TABLE_ID
+CLUSTER_ID = "cluster-id"
+CLUSTER_NAME = INSTANCE_NAME + "/clusters/" + CLUSTER_ID
+BACKUP_ID = "backup-id"
+BACKUP_NAME = CLUSTER_NAME + "/backups/" + BACKUP_ID
+
+APP_PROFILE_ID_1 = "app-profile-id-1"
+DESCRIPTION_1 = "routing policy any"
+APP_PROFILE_ID_2 = "app-profile-id-2"
+DESCRIPTION_2 = "routing policy single"
+ALLOW_WRITES = True
+CLUSTER_ID = "cluster-id"
+
+
+def _make_client(*args, **kwargs):
+    from google.cloud.bigtable.client import Client
+
+    return Client(*args, **kwargs)
+
+
+def _make_instance_admin_api():
+    from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
+        BigtableInstanceAdminClient,
+    )
+
+    return mock.create_autospec(BigtableInstanceAdminClient)
+
+
+def _make_instance(*args, **kwargs):
+    from google.cloud.bigtable.instance import Instance
+
+    return Instance(*args, **kwargs)
+
+
+def test_instance_constructor_defaults():
+
+    client = object()
+    instance = _make_instance(INSTANCE_ID, client)
+    assert instance.instance_id == INSTANCE_ID
+    assert instance.display_name == INSTANCE_ID
+    assert instance.type_ is None
+    assert instance.labels is None
+    assert instance._client is client
+    assert instance.state is None
+
+
+def test_instance_constructor_non_default():
+    from google.cloud.bigtable import enums
+
+    instance_type = enums.Instance.Type.DEVELOPMENT
+    state = enums.Instance.State.READY
+    labels = {"test": "test"}
+    client = object()
+
+    instance = _make_instance(
+        INSTANCE_ID,
+        client,
+        display_name=DISPLAY_NAME,
+        instance_type=instance_type,
+        labels=labels,
+        _state=state,
+    )
+    assert instance.instance_id == INSTANCE_ID
+    assert instance.display_name == DISPLAY_NAME
+    assert instance.type_ == instance_type
+    assert instance.labels == labels
+    assert instance._client is client
+    assert instance.state == state
+
+
+def test_instance__update_from_pb_success():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable import enums
+
+    instance_type = data_v2_pb2.Instance.Type.PRODUCTION
+    state = enums.Instance.State.READY
+    # TODO: rename 'type' to 'type_'?
+    instance_pb = data_v2_pb2.Instance(
+        display_name=DISPLAY_NAME, type_=instance_type, labels=LABELS, state=state,
+    )
+
+    instance = _make_instance(None, None)
+    assert instance.display_name is None
+    assert instance.type_ is None
+    assert instance.labels is None
+    instance._update_from_pb(instance_pb._pb)
+    assert instance.display_name == DISPLAY_NAME
+    assert instance.type_ == instance_type
+    assert instance.labels == LABELS
+    assert instance._state == state
+
+
+def test_instance__update_from_pb_success_defaults():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable import enums
+
+    instance_pb = data_v2_pb2.Instance(display_name=DISPLAY_NAME)
+
+    instance = _make_instance(None, None)
+    assert instance.display_name is None
+    assert instance.type_ is None
+    assert instance.labels is None
+    instance._update_from_pb(instance_pb._pb)
+    assert instance.display_name == DISPLAY_NAME
+    assert instance.type_ == enums.Instance.Type.UNSPECIFIED
+    assert not instance.labels
+
+
+def test_instance__update_from_pb_wo_display_name():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+
+    instance_pb = data_v2_pb2.Instance()
+    instance = _make_instance(None, None)
+    assert instance.display_name is None
+
+    with pytest.raises(ValueError):
+        instance._update_from_pb(instance_pb)
+
+
+def test_instance_from_pb_success():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable import enums
+    from google.cloud.bigtable.instance import Instance
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance_type = enums.Instance.Type.PRODUCTION
+    state = enums.Instance.State.READY
+    instance_pb = data_v2_pb2.Instance(
+        name=INSTANCE_NAME,
+        display_name=INSTANCE_ID,
+        type_=instance_type,
+        labels=LABELS,
+        state=state,
+    )
+
+    instance = Instance.from_pb(instance_pb, client)
+
+    assert isinstance(instance, Instance)
+    assert instance._client == client
+    assert instance.instance_id == INSTANCE_ID
+    assert instance.display_name == INSTANCE_ID
+    assert instance.type_ == instance_type
+    assert instance.labels == LABELS
+    assert instance._state == state
+
+
+def test_instance_from_pb_bad_instance_name():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.instance import Instance
+
+    instance_name = "INCORRECT_FORMAT"
+    instance_pb = data_v2_pb2.Instance(name=instance_name)
+
+    with pytest.raises(ValueError):
+        Instance.from_pb(instance_pb, None)
+
+
+def test_instance_from_pb_project_mismatch():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.instance import Instance
+
+    ALT_PROJECT = "ALT_PROJECT"
+    credentials = _make_credentials()
+    client = _make_client(project=ALT_PROJECT, credentials=credentials, admin=True)
 
-class TestInstance(unittest.TestCase):
+    instance_pb = data_v2_pb2.Instance(name=INSTANCE_NAME)
+
+    with pytest.raises(ValueError):
+        Instance.from_pb(instance_pb, client)
+
+
+def test_instance_name():
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.instance_path.return_value = INSTANCE_NAME
+    instance = _make_instance(INSTANCE_ID, client)
+
+    assert instance.name == INSTANCE_NAME
+
+
+def test_instance___eq__():
+    client = object()
+    instance1 = _make_instance(INSTANCE_ID, client)
+    instance2 = _make_instance(INSTANCE_ID, client)
+    assert instance1 == instance2
+
+
+def test_instance___eq__type_differ():
+    client = object()
+    instance1 = _make_instance(INSTANCE_ID, client)
+    instance2 = object()
+    assert instance1 != instance2
+
+
+def test_instance___ne__same_value():
+    client = object()
+    instance1 = _make_instance(INSTANCE_ID, client)
+    instance2 = _make_instance(INSTANCE_ID, client)
+    assert not (instance1 != instance2)
+
+
+def test_instance___ne__():
+    instance1 = _make_instance("instance_id1", "client1")
+    instance2 = _make_instance("instance_id2", "client2")
+    assert instance1 != instance2
+
+
+def test_instance_create_w_location_and_clusters():
+    instance = _make_instance(INSTANCE_ID, None)
+
+    with pytest.raises(ValueError):
+        instance.create(location_id=LOCATION_ID, clusters=[object(), object()])
+
+
+def test_instance_create_w_serve_nodes_and_clusters():
+    instance = _make_instance(INSTANCE_ID, None)
+
+    with pytest.raises(ValueError):
+        instance.create(serve_nodes=3, clusters=[object(), object()])
+
+
+def test_instance_create_w_default_storage_type_and_clusters():
+    instance = _make_instance(INSTANCE_ID, None)
+
+    with pytest.raises(ValueError):
+        instance.create(default_storage_type=1, clusters=[object(), object()])
+
+
+def _instance_api_response_for_create():
+    import datetime
+    from google.api_core import operation
+    from google.longrunning import operations_pb2
+    from google.protobuf.any_pb2 import Any
+    from google.cloud._helpers import _datetime_to_pb_timestamp
+    from google.cloud.bigtable_admin_v2.types import (
+        bigtable_instance_admin as messages_v2_pb2,
+    )
+    from google.cloud.bigtable_admin_v2.types import instance
 
-    PROJECT = "project"
-    INSTANCE_ID = "instance-id"
-    INSTANCE_NAME = "projects/" + PROJECT + "/instances/" + INSTANCE_ID
-    LOCATION_ID = "locid"
-    LOCATION = "projects/" + PROJECT + "/locations/" + LOCATION_ID
-    APP_PROFILE_PATH = (
-        "projects/" + PROJECT + "/instances/" + INSTANCE_ID + "/appProfiles/"
+    NOW = datetime.datetime.utcnow()
+    NOW_PB = _datetime_to_pb_timestamp(NOW)
+    metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
+    type_url = "type.googleapis.com/{}".format(
+        messages_v2_pb2.CreateInstanceMetadata._meta._pb.DESCRIPTOR.full_name
     )
-    DISPLAY_NAME = "display_name"
-    LABELS = {"foo": "bar"}
-    OP_ID = 8915
-    OP_NAME = "operations/projects/{}/instances/{}operations/{}".format(
-        PROJECT, INSTANCE_ID, OP_ID
+    response_pb = operations_pb2.Operation(
+        name=OP_NAME,
+        metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()),
    )
-    TABLE_ID = "table_id"
-    TABLE_NAME = INSTANCE_NAME + "/tables/" + TABLE_ID
-    CLUSTER_ID = "cluster-id"
-    CLUSTER_NAME = INSTANCE_NAME + "/clusters/" + CLUSTER_ID
-    BACKUP_ID = "backup-id"
-    BACKUP_NAME = CLUSTER_NAME + "/backups/" + BACKUP_ID
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.instance import Instance
-
-        return Instance
-
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
-
-    @staticmethod
-    def _get_target_client_class():
-        from google.cloud.bigtable.client import Client
-
-        return Client
-
-    def _make_client(self, *args, **kwargs):
-        return self._get_target_client_class()(*args, **kwargs)
-
-    def test_constructor_defaults(self):
-
-        client = object()
-        instance = self._make_one(self.INSTANCE_ID, client)
-        self.assertEqual(instance.instance_id, self.INSTANCE_ID)
-        self.assertEqual(instance.display_name, self.INSTANCE_ID)
-        self.assertIsNone(instance.type_)
-        self.assertIsNone(instance.labels)
-        self.assertIs(instance._client, client)
-        self.assertIsNone(instance.state)
-
-    def test_constructor_non_default(self):
-        from google.cloud.bigtable import enums
-
-        instance_type = enums.Instance.Type.DEVELOPMENT
-        state = enums.Instance.State.READY
-        labels = {"test": "test"}
-        client = object()
-
-        instance = self._make_one(
-            self.INSTANCE_ID,
-            client,
-            display_name=self.DISPLAY_NAME,
-            instance_type=instance_type,
-            labels=labels,
-            _state=state,
-        )
-        self.assertEqual(instance.instance_id, self.INSTANCE_ID)
-        self.assertEqual(instance.display_name, self.DISPLAY_NAME)
-        self.assertEqual(instance.type_, instance_type)
-        self.assertEqual(instance.labels, labels)
-        self.assertIs(instance._client, client)
-        self.assertEqual(instance.state, state)
-
-    def test__update_from_pb_success(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.cloud.bigtable import enums
-
-        instance_type = data_v2_pb2.Instance.Type.PRODUCTION
-        state = enums.Instance.State.READY
-        # todo type to type_?
-        instance_pb = data_v2_pb2.Instance(
-            display_name=self.DISPLAY_NAME,
-            type_=instance_type,
-            labels=self.LABELS,
-            state=state,
-        )
-
-        instance = self._make_one(None, None)
-        self.assertIsNone(instance.display_name)
-        self.assertIsNone(instance.type_)
-        self.assertIsNone(instance.labels)
-        instance._update_from_pb(instance_pb._pb)
-        self.assertEqual(instance.display_name, self.DISPLAY_NAME)
-        self.assertEqual(instance.type_, instance_type)
-        self.assertEqual(instance.labels, self.LABELS)
-        self.assertEqual(instance._state, state)
-
-    def test__update_from_pb_success_defaults(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.cloud.bigtable import enums
-
-        instance_pb = data_v2_pb2.Instance(display_name=self.DISPLAY_NAME)
-
-        instance = self._make_one(None, None)
-        self.assertIsNone(instance.display_name)
-        self.assertIsNone(instance.type_)
-        self.assertIsNone(instance.labels)
-        instance._update_from_pb(instance_pb._pb)
-        self.assertEqual(instance.display_name, self.DISPLAY_NAME)
-        self.assertEqual(instance.type_, enums.Instance.Type.UNSPECIFIED)
-        self.assertFalse(instance.labels)
-
-    def test__update_from_pb_no_display_name(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-
-        instance_pb = data_v2_pb2.Instance()
-        instance = self._make_one(None, None)
-        self.assertIsNone(instance.display_name)
-        with self.assertRaises(ValueError):
-            instance._update_from_pb(instance_pb)
-
-    def test_from_pb_success(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.cloud.bigtable import enums
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance_type = enums.Instance.Type.PRODUCTION
-        state = enums.Instance.State.READY
-        instance_pb = data_v2_pb2.Instance(
-            name=self.INSTANCE_NAME,
-            display_name=self.INSTANCE_ID,
-            type_=instance_type,
-            labels=self.LABELS,
-            state=state,
-        )
-
-        klass = self._get_target_class()
-        instance = klass.from_pb(instance_pb, client)
-        self.assertIsInstance(instance, klass)
-        self.assertEqual(instance._client, client)
-        self.assertEqual(instance.instance_id, self.INSTANCE_ID)
-        self.assertEqual(instance.display_name, self.INSTANCE_ID)
-        self.assertEqual(instance.type_, instance_type)
-        self.assertEqual(instance.labels, self.LABELS)
-        self.assertEqual(instance._state, state)
-
-    def test_from_pb_bad_instance_name(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-
-        instance_name = "INCORRECT_FORMAT"
-        instance_pb = data_v2_pb2.Instance(name=instance_name)
-
-        klass = self._get_target_class()
-        with self.assertRaises(ValueError):
-            klass.from_pb(instance_pb, None)
-
-    def test_from_pb_project_mistmatch(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-
-        ALT_PROJECT = "ALT_PROJECT"
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=ALT_PROJECT, credentials=credentials, admin=True
-        )
-
-        self.assertNotEqual(self.PROJECT, ALT_PROJECT)
-
-        instance_pb = data_v2_pb2.Instance(name=self.INSTANCE_NAME)
-
-        klass = self._get_target_class()
-        with self.assertRaises(ValueError):
-            klass.from_pb(instance_pb, client)
-
-    def test_name_property(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-
-        api = mock.create_autospec(BigtableInstanceAdminClient)
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-
-        api.instance_path.return_value = "projects/project/instances/instance-id"
-        # Patch the the API method.
-        client._instance_admin_client = api
-
-        instance = self._make_one(self.INSTANCE_ID, client)
-        self.assertEqual(instance.name, self.INSTANCE_NAME)
-
-    def test___eq__(self):
-        client = object()
-        instance1 = self._make_one(self.INSTANCE_ID, client)
-        instance2 = self._make_one(self.INSTANCE_ID, client)
-        self.assertEqual(instance1, instance2)
-
-    def test___eq__type_differ(self):
-        client = object()
-        instance1 = self._make_one(self.INSTANCE_ID, client)
-        instance2 = object()
-        self.assertNotEqual(instance1, instance2)
-
-    def test___ne__same_value(self):
-        client = object()
-        instance1 = self._make_one(self.INSTANCE_ID, client)
-        instance2 = self._make_one(self.INSTANCE_ID, client)
-        comparison_val = instance1 != instance2
-        self.assertFalse(comparison_val)
-
-    def test___ne__(self):
-        instance1 = self._make_one("instance_id1", "client1")
-        instance2 = self._make_one("instance_id2", "client2")
-        self.assertNotEqual(instance1, instance2)
-
-    def test_create_check_location_and_clusters(self):
-        instance = self._make_one(self.INSTANCE_ID, None)
-
-        with self.assertRaises(ValueError):
-            instance.create(location_id=self.LOCATION_ID, clusters=[object(), object()])
-
-    def test_create_check_serve_nodes_and_clusters(self):
-        instance = self._make_one(self.INSTANCE_ID, None)
-
-        with self.assertRaises(ValueError):
-            instance.create(serve_nodes=3, clusters=[object(), object()])
-
-    def test_create_check_default_storage_type_and_clusters(self):
-        instance = self._make_one(self.INSTANCE_ID, None)
-
-        with self.assertRaises(ValueError):
-            instance.create(default_storage_type=1, clusters=[object(), object()])
-
-    def _instance_api_response_for_create(self):
-        import datetime
-        from google.api_core import operation
-        from google.longrunning import operations_pb2
-        from google.protobuf.any_pb2 import Any
-        from google.cloud._helpers import _datetime_to_pb_timestamp
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.cloud.bigtable_admin_v2.types import (
-            bigtable_instance_admin as messages_v2_pb2,
-        )
-        from google.cloud.bigtable_admin_v2.types import instance
-
-        NOW = datetime.datetime.utcnow()
-        NOW_PB = _datetime_to_pb_timestamp(NOW)
-        metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB)
-        type_url = "type.googleapis.com/{}".format(
-            messages_v2_pb2.CreateInstanceMetadata._meta._pb.DESCRIPTOR.full_name
-        )
-        response_pb = operations_pb2.Operation(
-            name=self.OP_NAME,
-            metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()),
-        )
-        response = operation.from_gapic(
-            response_pb,
-            mock.Mock(),
-            instance.Instance,
-            metadata_type=messages_v2_pb2.CreateInstanceMetadata,
-        )
-        project_path_template = "projects/{}"
-        location_path_template = "projects/{}/locations/{}"
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        instance_api.create_instance.return_value = response
-        instance_api.project_path = project_path_template.format
-        instance_api.location_path = location_path_template.format
-        instance_api.common_location_path = location_path_template.format
-        return instance_api, response
-
-    def test_create(self):
-        from google.cloud.bigtable import enums
-        from google.cloud.bigtable_admin_v2.types import Instance
-        from google.cloud.bigtable_admin_v2.types import Cluster
-        import warnings
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(
-            self.INSTANCE_ID,
-            client,
-            self.DISPLAY_NAME,
-            enums.Instance.Type.PRODUCTION,
-            self.LABELS,
-        )
-        instance_api, response = self._instance_api_response_for_create()
-        instance_api.common_project_path.return_value = "projects/project"
-        client._instance_admin_client = instance_api
-        serve_nodes = 3
-
-        with warnings.catch_warnings(record=True) as warned:
-            result = instance.create(
-                location_id=self.LOCATION_ID, serve_nodes=serve_nodes
-            )
-
-        cluster_pb = Cluster(
-            location=instance_api.location_path(self.PROJECT, self.LOCATION_ID),
-            serve_nodes=serve_nodes,
-            default_storage_type=enums.StorageType.UNSPECIFIED,
-        )
-        instance_pb = Instance(
-            display_name=self.DISPLAY_NAME,
-            type_=enums.Instance.Type.PRODUCTION,
-            labels=self.LABELS,
-        )
-        cluster_id = "{}-cluster".format(self.INSTANCE_ID)
-        instance_api.create_instance.assert_called_once_with(
-            request={
-                "parent": instance_api.project_path(self.PROJECT),
-                "instance_id": self.INSTANCE_ID,
-                "instance": instance_pb,
-                "clusters": {cluster_id: cluster_pb},
-            }
-        )
-
-        self.assertEqual(len(warned), 1)
-        self.assertIs(warned[0].category, DeprecationWarning)
-
-        self.assertIs(result, response)
-
-    def test_create_w_clusters(self):
-        from google.cloud.bigtable import enums
-        from google.cloud.bigtable.cluster import Cluster
-        from google.cloud.bigtable_admin_v2.types import Cluster as cluster_pb
-        from google.cloud.bigtable_admin_v2.types import Instance as instance_pb
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(
-            self.INSTANCE_ID,
-            client,
-            self.DISPLAY_NAME,
-            enums.Instance.Type.PRODUCTION,
-            self.LABELS,
-        )
-        instance_api, response = self._instance_api_response_for_create()
-        instance_api.common_project_path.return_value = "projects/project"
-        client._instance_admin_client = instance_api
-
-        # Perform the method and check the result.
-        cluster_id_1 = "cluster-1"
-        cluster_id_2 = "cluster-2"
-        location_id_1 = "location-id-1"
-        location_id_2 = "location-id-2"
-        serve_nodes_1 = 3
-        serve_nodes_2 = 5
-        clusters = [
-            Cluster(
-                cluster_id_1,
-                instance,
-                location_id=location_id_1,
-                serve_nodes=serve_nodes_1,
-            ),
-            Cluster(
-                cluster_id_2,
-                instance,
-                location_id=location_id_2,
-                serve_nodes=serve_nodes_2,
-            ),
-        ]
-
-        result = instance.create(clusters=clusters)
-
-        cluster_pb_1 = cluster_pb(
-            location=instance_api.location_path(self.PROJECT, location_id_1),
+    response = operation.from_gapic(
+        response_pb,
+        mock.Mock(),
+        instance.Instance,
+        metadata_type=messages_v2_pb2.CreateInstanceMetadata,
+    )
+    project_path_template = "projects/{}"
+    location_path_template = "projects/{}/locations/{}"
+    api = _make_instance_admin_api()
+    api.create_instance.return_value = response
+    api.project_path = project_path_template.format
+    api.location_path = location_path_template.format
+    api.common_location_path = location_path_template.format
+    return api, response
+
+
+def test_instance_create():
+    from google.cloud.bigtable import enums
+    from google.cloud.bigtable_admin_v2.types import Instance
+    from google.cloud.bigtable_admin_v2.types import Cluster
+    import warnings
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(
+        INSTANCE_ID, client, DISPLAY_NAME, enums.Instance.Type.PRODUCTION, LABELS,
+    )
+    api, response = _instance_api_response_for_create()
+    client._instance_admin_client = api
+    api.common_project_path.return_value = "projects/project"
+    serve_nodes = 3
+
+    with warnings.catch_warnings(record=True) as warned:
+        result = instance.create(location_id=LOCATION_ID, serve_nodes=serve_nodes)
+
+    assert result is response
+
+    cluster_pb = Cluster(
+        location=api.location_path(PROJECT, LOCATION_ID),
+        serve_nodes=serve_nodes,
+        default_storage_type=enums.StorageType.UNSPECIFIED,
+    )
+    instance_pb = Instance(
+        display_name=DISPLAY_NAME, type_=enums.Instance.Type.PRODUCTION, labels=LABELS,
+    )
+    cluster_id = "{}-cluster".format(INSTANCE_ID)
+    api.create_instance.assert_called_once_with(
+        request={
+            "parent": api.project_path(PROJECT),
+            "instance_id": INSTANCE_ID,
+            "instance": instance_pb,
+            "clusters": {cluster_id: cluster_pb},
+        }
+    )
+
+    assert len(warned) == 1
+    assert warned[0].category is DeprecationWarning
+
+
+def test_instance_create_w_clusters():
+    from google.cloud.bigtable import enums
+    from google.cloud.bigtable.cluster import Cluster
+    from google.cloud.bigtable_admin_v2.types import Cluster as cluster_pb
+    from google.cloud.bigtable_admin_v2.types import Instance as instance_pb
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(
+        INSTANCE_ID, client, DISPLAY_NAME, enums.Instance.Type.PRODUCTION, LABELS,
+    )
+    api, response = _instance_api_response_for_create()
+    client._instance_admin_client = api
+    api.common_project_path.return_value = "projects/project"
+    cluster_id_1 = "cluster-1"
+    cluster_id_2 = "cluster-2"
+    location_id_1 = "location-id-1"
+    location_id_2 = "location-id-2"
+    serve_nodes_1 = 3
+    serve_nodes_2 = 5
+    clusters = [
+        Cluster(
+            cluster_id_1,
+            instance,
+            location_id=location_id_1,
             serve_nodes=serve_nodes_1,
-            default_storage_type=enums.StorageType.UNSPECIFIED,
-        )
-        cluster_pb_2 = cluster_pb(
-            location=instance_api.location_path(self.PROJECT, location_id_2),
+        ),
+        Cluster(
+            cluster_id_2,
+            instance,
+            location_id=location_id_2,
             serve_nodes=serve_nodes_2,
-            default_storage_type=enums.StorageType.UNSPECIFIED,
-        )
-        instance_pb = instance_pb(
-            display_name=self.DISPLAY_NAME,
-            type_=enums.Instance.Type.PRODUCTION,
-            labels=self.LABELS,
-        )
-        instance_api.create_instance.assert_called_once_with(
-            request={
-                "parent": instance_api.project_path(self.PROJECT),
-                "instance_id": self.INSTANCE_ID,
-                "instance": instance_pb,
-                "clusters": {cluster_id_1: cluster_pb_1, cluster_id_2: cluster_pb_2},
-            }
-        )
-
-        self.assertIs(result, response)
-
-    def test_exists(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.api_core import exceptions
-
-        api = mock.create_autospec(BigtableInstanceAdminClient)
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-
-        # Create response_pb
-        instance_name = client.instance_admin_client.instance_path(
-            self.PROJECT, self.INSTANCE_ID
-        )
-        response_pb = data_v2_pb2.Instance(name=instance_name)
-
-        # Patch the stub used by the API method.
-        client._instance_admin_client = api
-        instance_admin_stub = client._instance_admin_client
-
-        instance_admin_stub.get_instance.side_effect = [
-            response_pb,
-            exceptions.NotFound("testing"),
-            exceptions.BadRequest("testing"),
-        ]
-
-        # Perform the method and check the result.
-        non_existing_instance_id = "instance-id-2"
-        alt_instance_1 = self._make_one(self.INSTANCE_ID, client)
-        alt_instance_2 = self._make_one(non_existing_instance_id, client)
-        self.assertTrue(alt_instance_1.exists())
-        self.assertFalse(alt_instance_2.exists())
-
-        with self.assertRaises(exceptions.BadRequest):
-            alt_instance_2.exists()
-
-    def test_reload(self):
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.cloud.bigtable import enums
-
-        api = mock.create_autospec(BigtableInstanceAdminClient)
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        # Create response_pb
-        DISPLAY_NAME = u"hey-hi-hello"
-        instance_type = enums.Instance.Type.PRODUCTION
-        response_pb = data_v2_pb2.Instance(
-            display_name=DISPLAY_NAME, type_=instance_type, labels=self.LABELS
-        )
-
-        # Patch the stub used by the API method.
-        client._instance_admin_client = api
-        bigtable_instance_stub = client._instance_admin_client
-        bigtable_instance_stub.get_instance.side_effect = [response_pb]
-
-        # Create expected_result.
-        expected_result = None  # reload() has no return value.
-
-        # Check Instance optional config values before.
-        self.assertEqual(instance.display_name, self.INSTANCE_ID)
-
-        # Perform the method and check the result.
-        result = instance.reload()
-        self.assertEqual(result, expected_result)
-
-        # Check Instance optional config values before.
-        self.assertEqual(instance.display_name, DISPLAY_NAME)
-
-    def _instance_api_response_for_update(self):
-        import datetime
-        from google.api_core import operation
-        from google.longrunning import operations_pb2
-        from google.protobuf.any_pb2 import Any
-        from google.cloud._helpers import _datetime_to_pb_timestamp
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.cloud.bigtable_admin_v2.types import (
-            bigtable_instance_admin as messages_v2_pb2,
-        )
-        from google.cloud.bigtable_admin_v2.types import instance
-
-        NOW = datetime.datetime.utcnow()
-        NOW_PB = _datetime_to_pb_timestamp(NOW)
-        metadata = messages_v2_pb2.UpdateInstanceMetadata(request_time=NOW_PB)
-        type_url = "type.googleapis.com/{}".format(
-            messages_v2_pb2.UpdateInstanceMetadata._meta._pb.DESCRIPTOR.full_name
-        )
-        response_pb = operations_pb2.Operation(
-            name=self.OP_NAME,
-            metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()),
-        )
-        response = operation.from_gapic(
-            response_pb,
-            mock.Mock(),
-            instance.Instance,
-            metadata_type=messages_v2_pb2.UpdateInstanceMetadata,
-        )
-        instance_path_template = "projects/{project}/instances/{instance}"
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        instance_api.partial_update_instance.return_value = response
-        instance_api.instance_path = instance_path_template.format
-        return instance_api, response
-
-    def test_update(self):
-        from google.cloud.bigtable import enums
-        from google.protobuf import field_mask_pb2
-        from google.cloud.bigtable_admin_v2.types import Instance
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(
-            self.INSTANCE_ID,
-            client,
-            display_name=self.DISPLAY_NAME,
-            instance_type=enums.Instance.Type.DEVELOPMENT,
-            labels=self.LABELS,
-        )
-        instance_api, response = self._instance_api_response_for_update()
-        client._instance_admin_client = instance_api
-
-        result = instance.update()
-
-        instance_pb = Instance(
-            name=instance.name,
-            display_name=instance.display_name,
-            type_=instance.type_,
-            labels=instance.labels,
-        )
-        update_mask_pb = field_mask_pb2.FieldMask(
-            paths=["display_name", "type", "labels"]
-        )
-
-        instance_api.partial_update_instance.assert_called_once_with(
-            request={"instance": instance_pb, "update_mask": update_mask_pb}
-        )
-
-        self.assertIs(result, response)
-
-    def test_update_empty(self):
-        from google.protobuf import field_mask_pb2
-        from google.cloud.bigtable_admin_v2.types import Instance
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(None, client)
-        instance_api, response = self._instance_api_response_for_update()
-        client._instance_admin_client = instance_api
-
-        result = instance.update()
-
-        instance_pb = Instance(
-            name=instance.name,
-            display_name=instance.display_name,
-            type_=instance.type_,
-            labels=instance.labels,
-        )
-        update_mask_pb = field_mask_pb2.FieldMask()
-
-        instance_api.partial_update_instance.assert_called_once_with(
-            request={"instance": instance_pb, "update_mask": update_mask_pb}
-        )
-
-        self.assertIs(result, response)
-
-    def test_delete(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        instance_api.delete_instance.return_value = None
-        client._instance_admin_client = instance_api
-
-        result = instance.delete()
-
-        instance_api.delete_instance.assert_called_once_with(
-            request={"name": instance.name}
-        )
-
-        self.assertIsNone(result)
-
-    def test_get_iam_policy(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.iam.v1 import policy_pb2
-        from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        version = 1
-        etag = b"etag_v1"
-        members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
-        bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}]
-        iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
-
-        # Patch the stub used by the API method.
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        client._instance_admin_client = instance_api
-        instance_api.get_iam_policy.return_value = iam_policy
-
-        # Perform the method and check the result.
-        result = instance.get_iam_policy()
-
-        instance_api.get_iam_policy.assert_called_once_with(
-            request={"resource": instance.name}
-        )
-        self.assertEqual(result.version, version)
-        self.assertEqual(result.etag, etag)
-        admins = result.bigtable_admins
-        self.assertEqual(len(admins), len(members))
-        for found, expected in zip(sorted(admins), sorted(members)):
-            self.assertEqual(found, expected)
-
-    def test_get_iam_policy_w_requested_policy_version(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.iam.v1 import policy_pb2, options_pb2
-        from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        version = 1
-        etag = b"etag_v1"
-        members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
-        bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}]
-        iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
-
-        # Patch the stub used by the API method.
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        client._instance_admin_client = instance_api
-        instance_api.get_iam_policy.return_value = iam_policy
-
-        # Perform the method and check the result.
-        result = instance.get_iam_policy(requested_policy_version=3)
-
-        instance_api.get_iam_policy.assert_called_once_with(
-            request={
-                "resource": instance.name,
-                "options_": options_pb2.GetPolicyOptions(requested_policy_version=3),
-            }
-        )
-        self.assertEqual(result.version, version)
-        self.assertEqual(result.etag, etag)
-        admins = result.bigtable_admins
-        self.assertEqual(len(admins), len(members))
-        for found, expected in zip(sorted(admins), sorted(members)):
-            self.assertEqual(found, expected)
-
-    def test_set_iam_policy(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.iam.v1 import policy_pb2
-        from google.cloud.bigtable.policy import Policy
-        from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        version = 1
-        etag = b"etag_v1"
-        members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
-        bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": sorted(members)}]
-        iam_policy_pb = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
-
-        # Patch the stub used by the API method.
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        instance_api.set_iam_policy.return_value = iam_policy_pb
-        client._instance_admin_client = instance_api
-
-        # Perform the method and check the result.
-        iam_policy = Policy(etag=etag, version=version)
-        iam_policy[BIGTABLE_ADMIN_ROLE] = [
-            Policy.user("user1@test.com"),
-            Policy.service_account("service_acc1@test.com"),
-        ]
-
-        result = instance.set_iam_policy(iam_policy)
-
-        instance_api.set_iam_policy.assert_called_once_with(
-            request={"resource": instance.name, "policy": iam_policy_pb}
-        )
-        self.assertEqual(result.version, version)
-        self.assertEqual(result.etag, etag)
-        admins = result.bigtable_admins
-        self.assertEqual(len(admins), len(members))
-        for found, expected in zip(sorted(admins), sorted(members)):
-            self.assertEqual(found, expected)
-
-    def test_test_iam_permissions(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.iam.v1 import iam_policy_pb2
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        permissions = ["bigtable.tables.create", "bigtable.clusters.create"]
-
-        response = iam_policy_pb2.TestIamPermissionsResponse(permissions=permissions)
-
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        instance_api.test_iam_permissions.return_value = response
-        client._instance_admin_client = instance_api
-
-        result = instance.test_iam_permissions(permissions)
-
-        self.assertEqual(result, permissions)
-        instance_api.test_iam_permissions.assert_called_once_with(
-            request={"resource": instance.name, "permissions": permissions}
-        )
-
-    def test_cluster_factory(self):
-        from google.cloud.bigtable import enums
-
-        CLUSTER_ID = "{}-cluster".format(self.INSTANCE_ID)
-        LOCATION_ID = "us-central1-c"
-        SERVE_NODES = 3
-        STORAGE_TYPE = enums.StorageType.HDD
-
-        instance = self._make_one(self.INSTANCE_ID, None)
-
-        cluster = instance.cluster(
-            CLUSTER_ID,
-            location_id=LOCATION_ID,
-            serve_nodes=SERVE_NODES,
-            default_storage_type=STORAGE_TYPE,
-        )
-        self.assertIsInstance(cluster, Cluster)
-        self.assertEqual(cluster.cluster_id, CLUSTER_ID)
-        self.assertEqual(cluster.location_id, LOCATION_ID)
-        self.assertIsNone(cluster._state)
-        self.assertEqual(cluster.serve_nodes, SERVE_NODES)
-        self.assertEqual(cluster.default_storage_type, STORAGE_TYPE)
-
-    def test_list_clusters(self):
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.cloud.bigtable_admin_v2.types import (
-            bigtable_instance_admin as messages_v2_pb2,
-        )
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.cloud.bigtable.instance import Instance
-        from google.cloud.bigtable.instance import Cluster
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = Instance(self.INSTANCE_ID, client)
-
-        failed_location = "FAILED"
-        cluster_id1 = "cluster-id1"
-        cluster_id2 = "cluster-id2"
-        cluster_path_template = "projects/{}/instances/{}/clusters/{}"
-        cluster_name1 = cluster_path_template.format(
-            self.PROJECT, self.INSTANCE_ID, cluster_id1
-        )
-        cluster_name2 = cluster_path_template.format(
-            self.PROJECT, self.INSTANCE_ID, cluster_id2
-        )
-
-        # Create response_pb
-        response_pb = messages_v2_pb2.ListClustersResponse(
-            failed_locations=[failed_location],
-            clusters=[
-                data_v2_pb2.Cluster(name=cluster_name1),
-                data_v2_pb2.Cluster(name=cluster_name2),
-            ],
-        )
-
-        # Patch the stub used by the API method.
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        instance_api.list_clusters.side_effect = [response_pb]
-        instance_api.cluster_path = cluster_path_template.format
-        client._instance_admin_client = instance_api
-
-        # Perform the method and check the result.
-        clusters, failed_locations = instance.list_clusters()
-
-        cluster_1, cluster_2 = clusters
-
-        self.assertIsInstance(cluster_1, Cluster)
-        self.assertEqual(cluster_1.name, cluster_name1)
-
-        self.assertIsInstance(cluster_2, Cluster)
-        self.assertEqual(cluster_2.name, cluster_name2)
-
-        self.assertEqual(failed_locations, [failed_location])
-
-    def test_table_factory(self):
-        from google.cloud.bigtable.table import Table
-
-        app_profile_id = "appProfileId1262094415"
-        instance = self._make_one(self.INSTANCE_ID, None)
-
-        table = instance.table(self.TABLE_ID, app_profile_id=app_profile_id)
-        self.assertIsInstance(table, Table)
-        self.assertEqual(table.table_id, self.TABLE_ID)
-        self.assertEqual(table._instance, instance)
-        self.assertEqual(table._app_profile_id, app_profile_id)
-
-    def _list_tables_helper(self, table_name=None):
-        from google.cloud.bigtable_admin_v2.types import table as table_data_v2_pb2
-        from google.cloud.bigtable_admin_v2.types import (
-            bigtable_table_admin as table_messages_v1_pb2,
-        )
-        from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
-            BigtableTableAdminClient,
-        )
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-
-        table_api = mock.create_autospec(BigtableTableAdminClient)
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        instance_api.instance_path.return_value = instance.name
-        # Create response_pb
-        if table_name is None:
-            table_name = self.TABLE_NAME
-
-        response_pb = table_messages_v1_pb2.ListTablesResponse(
-            tables=[table_data_v2_pb2.Table(name=table_name)]
-        )
-
-        # Patch the stub used by the API method.
-        client._table_admin_client = table_api
-        client._instance_admin_client = instance_api
-        bigtable_table_stub = client._table_admin_client
-        bigtable_table_stub.list_tables.side_effect = [response_pb]
-
-        # Create expected_result.
-        expected_table = instance.table(self.TABLE_ID)
-        expected_result = [expected_table]
-
-        # Perform the method and check the result.
-        result = instance.list_tables()
-
-        self.assertEqual(result, expected_result)
-
-    def test_list_tables(self):
-        self._list_tables_helper()
-
-    def test_list_tables_failure_bad_split(self):
-        with self.assertRaises(ValueError):
-            self._list_tables_helper(table_name="wrong-format")
-
-    def test_list_tables_failure_name_bad_before(self):
-        BAD_TABLE_NAME = (
-            "nonempty-section-before"
-            + "projects/"
-            + self.PROJECT
-            + "/instances/"
-            + self.INSTANCE_ID
-            + "/tables/"
-            + self.TABLE_ID
-        )
-        with self.assertRaises(ValueError):
-            self._list_tables_helper(table_name=BAD_TABLE_NAME)
-
-    def test_app_profile_factory(self):
-        from google.cloud.bigtable.enums import RoutingPolicyType
-
-        APP_PROFILE_ID_1 = "app-profile-id-1"
-        ANY = RoutingPolicyType.ANY
-        DESCRIPTION_1 = "routing policy any"
-        APP_PROFILE_ID_2 = "app-profile-id-2"
-        SINGLE = RoutingPolicyType.SINGLE
-        DESCRIPTION_2 = "routing policy single"
-        ALLOW_WRITES = True
-        CLUSTER_ID = "cluster-id"
-
-        instance = self._make_one(self.INSTANCE_ID, None)
-
-        app_profile1 = instance.app_profile(
-            APP_PROFILE_ID_1, routing_policy_type=ANY, description=DESCRIPTION_1
-        )
-
-        app_profile2 = instance.app_profile(
-            APP_PROFILE_ID_2,
-            routing_policy_type=SINGLE,
-            description=DESCRIPTION_2,
-            cluster_id=CLUSTER_ID,
-            allow_transactional_writes=ALLOW_WRITES,
-        )
-        self.assertEqual(app_profile1.app_profile_id, APP_PROFILE_ID_1)
-        self.assertIs(app_profile1._instance, instance)
-        self.assertEqual(app_profile1.routing_policy_type, ANY)
-        self.assertEqual(app_profile1.description, DESCRIPTION_1)
-        self.assertEqual(app_profile2.app_profile_id, APP_PROFILE_ID_2)
-        self.assertIs(app_profile2._instance, instance)
-        self.assertEqual(app_profile2.routing_policy_type, SINGLE)
-        self.assertEqual(app_profile2.description, DESCRIPTION_2)
-        self.assertEqual(app_profile2.cluster_id, CLUSTER_ID)
-        self.assertEqual(app_profile2.allow_transactional_writes, ALLOW_WRITES)
-
-    def test_list_app_profiles(self):
-        from google.api_core.page_iterator import Iterator
-        from google.api_core.page_iterator import Page
-        from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import (
-            BigtableInstanceAdminClient,
-        )
-        from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
-        from google.cloud.bigtable.app_profile import AppProfile
-
-        class _Iterator(Iterator):
-            def __init__(self, pages):
-                super(_Iterator, self).__init__(client=None)
-                self._pages = pages
-
-            def _next_page(self):
-                if self._pages:
-                    page, self._pages = self._pages[0], self._pages[1:]
-                    return Page(self, page, self.item_to_value)
-
-        credentials = _make_credentials()
-        client = self._make_client(
-            project=self.PROJECT, credentials=credentials, admin=True
-        )
-        instance = self._make_one(self.INSTANCE_ID, client)
-
-        # Setup Expected Response
-        app_profile_path_template = "projects/{}/instances/{}/appProfiles/{}"
-        app_profile_id1 = "app-profile-id1"
-        app_profile_id2 = "app-profile-id2"
-        app_profile_name1 = app_profile_path_template.format(
-            self.PROJECT, self.INSTANCE_ID, app_profile_id1
-        )
-        app_profile_name2 = app_profile_path_template.format(
-            self.PROJECT, self.INSTANCE_ID, app_profile_id2
-        )
-        routing_policy = data_v2_pb2.AppProfile.MultiClusterRoutingUseAny()
-
-        app_profiles = [
-            data_v2_pb2.AppProfile(
-                name=app_profile_name1, multi_cluster_routing_use_any=routing_policy
-            ),
-            data_v2_pb2.AppProfile(
-                name=app_profile_name2, multi_cluster_routing_use_any=routing_policy
-            ),
-        ]
-        iterator = _Iterator(pages=[app_profiles])
-
-        # Patch the stub used by the API method.
-        instance_api = mock.create_autospec(BigtableInstanceAdminClient)
-        client._instance_admin_client = instance_api
-        instance_api.app_profile_path = app_profile_path_template.format
-        instance_api.list_app_profiles.return_value = iterator
-
-        # Perform the method and check the result.
-        app_profiles = instance.list_app_profiles()
-
-        app_profile_1, app_profile_2 = app_profiles
-
-        self.assertIsInstance(app_profile_1, AppProfile)
-        self.assertEqual(app_profile_1.name, app_profile_name1)
-
-        self.assertIsInstance(app_profile_2, AppProfile)
-        self.assertEqual(app_profile_2.name, app_profile_name2)
+        ),
+    ]
+
+    result = instance.create(clusters=clusters)
+
+    assert result is response
+
+    cluster_pb_1 = cluster_pb(
+        location=api.location_path(PROJECT, location_id_1),
+        serve_nodes=serve_nodes_1,
+        default_storage_type=enums.StorageType.UNSPECIFIED,
+    )
+    cluster_pb_2 = cluster_pb(
+        location=api.location_path(PROJECT, location_id_2),
+        serve_nodes=serve_nodes_2,
+        default_storage_type=enums.StorageType.UNSPECIFIED,
+    )
+    instance_pb = instance_pb(
+        display_name=DISPLAY_NAME, type_=enums.Instance.Type.PRODUCTION, labels=LABELS,
+    )
+    api.create_instance.assert_called_once_with(
+        request={
+            "parent": api.project_path(PROJECT),
+            "instance_id": INSTANCE_ID,
+            "instance": instance_pb,
+            "clusters": {cluster_id_1: cluster_pb_1, cluster_id_2: cluster_pb_2},
+        }
+    )
+
+
+def test_instance_exists_hit():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    response_pb = data_v2_pb2.Instance(name=INSTANCE_NAME)
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.instance_path.return_value = INSTANCE_NAME
+    api.get_instance.return_value = response_pb
+    instance = _make_instance(INSTANCE_ID, client)
+
+    assert instance.exists()
+
+    api.get_instance.assert_called_once_with(request={"name": INSTANCE_NAME})
+
+
+def test_instance_exists_miss():
+    from google.api_core import exceptions
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.instance_path.return_value = INSTANCE_NAME
+    api.get_instance.side_effect = exceptions.NotFound("testing")
+
+    non_existing_instance_id = "instance-id-2"
+    instance = _make_instance(non_existing_instance_id, client)
+
+    assert not instance.exists()
+
+    api.get_instance.assert_called_once_with(request={"name": INSTANCE_NAME})
+
+
+def test_instance_exists_w_error():
+    from google.api_core import exceptions
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.instance_path.return_value = INSTANCE_NAME
+    api.get_instance.side_effect = exceptions.BadRequest("testing")
+    instance = _make_instance(INSTANCE_ID, client)
+
+    with pytest.raises(exceptions.BadRequest):
+        instance.exists()
+
+    api.get_instance.assert_called_once_with(request={"name": INSTANCE_NAME})
+
+
+def test_instance_reload():
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable import enums
+
+    DISPLAY_NAME = u"hey-hi-hello"
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(INSTANCE_ID, client)
+    response_pb = data_v2_pb2.Instance(
+        display_name=DISPLAY_NAME, type_=enums.Instance.Type.PRODUCTION, labels=LABELS
+    )
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.get_instance.side_effect = [response_pb]
+    assert instance.display_name == INSTANCE_ID
+
+    result = instance.reload()
+
+    assert result is None
+    assert instance.display_name == DISPLAY_NAME
+
+
+def _instance_api_response_for_update():
+    import datetime
+    from google.api_core import operation
+    from google.longrunning import operations_pb2
+    from google.protobuf.any_pb2 import Any
+    from google.cloud._helpers import _datetime_to_pb_timestamp
+    from google.cloud.bigtable_admin_v2.types import (
+        bigtable_instance_admin as messages_v2_pb2,
+    )
+    from google.cloud.bigtable_admin_v2.types import instance
+
+    NOW = datetime.datetime.utcnow()
+    NOW_PB = _datetime_to_pb_timestamp(NOW)
+    metadata = messages_v2_pb2.UpdateInstanceMetadata(request_time=NOW_PB)
+    type_url = "type.googleapis.com/{}".format(
+        messages_v2_pb2.UpdateInstanceMetadata._meta._pb.DESCRIPTOR.full_name
+    )
+    response_pb = operations_pb2.Operation(
+        name=OP_NAME,
+        metadata=Any(type_url=type_url, value=metadata._pb.SerializeToString()),
+    )
+    response = operation.from_gapic(
+        response_pb,
+        mock.Mock(),
+        instance.Instance,
+        metadata_type=messages_v2_pb2.UpdateInstanceMetadata,
+    )
+    instance_path_template = "projects/{project}/instances/{instance}"
+    api = _make_instance_admin_api()
+    api.partial_update_instance.return_value = response
+    api.instance_path = instance_path_template.format
+    return api, response
+
+
+def test_instance_update():
+    from google.cloud.bigtable import enums
+    from google.protobuf import field_mask_pb2
+    from google.cloud.bigtable_admin_v2.types import Instance
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(
+        INSTANCE_ID,
+        client,
+        display_name=DISPLAY_NAME,
+        instance_type=enums.Instance.Type.DEVELOPMENT,
+        labels=LABELS,
+    )
+    api, response = _instance_api_response_for_update()
+    client._instance_admin_client = api
+
+    result = instance.update()
+
+    assert result is response
+
+    instance_pb = Instance(
+        name=instance.name,
+        display_name=instance.display_name,
+        type_=instance.type_,
+        labels=instance.labels,
+    )
+    update_mask_pb = field_mask_pb2.FieldMask(paths=["display_name", "type", "labels"])
+
+    api.partial_update_instance.assert_called_once_with(
+        request={"instance": instance_pb, "update_mask": update_mask_pb}
+    )
+
+
+def test_instance_update_empty():
+    from google.protobuf import field_mask_pb2
+    from google.cloud.bigtable_admin_v2.types import Instance
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(None, client)
+    api, response = _instance_api_response_for_update()
+    client._instance_admin_client = api
+
+    result = instance.update()
+
+    assert result is response
+
+    instance_pb = Instance(
+        name=instance.name,
+        display_name=instance.display_name,
+        type_=instance.type_,
+        labels=instance.labels,
+    )
+    update_mask_pb = field_mask_pb2.FieldMask()
+
+    api.partial_update_instance.assert_called_once_with(
+        request={"instance": instance_pb, "update_mask": update_mask_pb}
+    )
+
+
+def test_instance_delete():
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(INSTANCE_ID, client)
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.delete_instance.return_value = None
+
+    result = instance.delete()
+
+    assert result is None
+
+    api.delete_instance.assert_called_once_with(request={"name": instance.name})
+
+
+def test_instance_get_iam_policy():
+    from google.iam.v1 import policy_pb2
+    from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(INSTANCE_ID, client)
+
+    version = 1
+    etag = b"etag_v1"
+    members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
+    bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}]
+    iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.get_iam_policy.return_value = iam_policy
+
+    result = instance.get_iam_policy()
+
+    assert result.version == version
+    assert result.etag == etag
+    admins = result.bigtable_admins
+    assert len(admins) == len(members)
+
+    for found, expected in zip(sorted(admins), sorted(members)):
+        assert found == expected
+    api.get_iam_policy.assert_called_once_with(request={"resource": instance.name})
+
+
+def test_instance_get_iam_policy_w_requested_policy_version():
+    from google.iam.v1 import policy_pb2, options_pb2
+    from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(INSTANCE_ID, client)
+
+    version = 1
+    etag = b"etag_v1"
+    members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
+    bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}]
+    iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
+
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.get_iam_policy.return_value = iam_policy
+
+    result = instance.get_iam_policy(requested_policy_version=3)
+
+    assert result.version == version
+    assert result.etag == etag
+    admins = result.bigtable_admins
+    assert len(admins) == len(members)
+    for found, expected in zip(sorted(admins), sorted(members)):
+        assert found == expected
+
+    api.get_iam_policy.assert_called_once_with(
+        request={
+            "resource": instance.name,
+            "options_": options_pb2.GetPolicyOptions(requested_policy_version=3),
+        }
+    )
+
+
+def test_instance_set_iam_policy():
+    from google.iam.v1 import policy_pb2
+    from google.cloud.bigtable.policy import Policy
+    from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(INSTANCE_ID, client)
+
+    version = 1
+    etag = b"etag_v1"
+    members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"]
+    bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": sorted(members)}]
+    iam_policy_pb = policy_pb2.Policy(version=version, etag=etag, bindings=bindings)
+
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.set_iam_policy.return_value = iam_policy_pb
+    iam_policy = Policy(etag=etag, version=version)
+    iam_policy[BIGTABLE_ADMIN_ROLE] = [
+        Policy.user("user1@test.com"),
+        Policy.service_account("service_acc1@test.com"),
+    ]
+
+    result = instance.set_iam_policy(iam_policy)
+
+    api.set_iam_policy.assert_called_once_with(
+        request={"resource": instance.name, "policy": iam_policy_pb}
+    )
+    assert result.version == version
+    assert result.etag == etag
+    admins = result.bigtable_admins
+    assert len(admins) == len(members)
+    for found, expected in zip(sorted(admins), sorted(members)):
+        assert found == expected
+
+
+def test_instance_test_iam_permissions():
+    from google.iam.v1 import iam_policy_pb2
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = _make_instance(INSTANCE_ID, client)
+
+    permissions = ["bigtable.tables.create", "bigtable.clusters.create"]
+
+    response = iam_policy_pb2.TestIamPermissionsResponse(permissions=permissions)
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.test_iam_permissions.return_value = response
+
+    result = instance.test_iam_permissions(permissions)
+
+    assert result == permissions
+    api.test_iam_permissions.assert_called_once_with(
+        request={"resource": instance.name, "permissions": permissions}
+    )
+
+
+def test_instance_cluster_factory():
+    from google.cloud.bigtable import enums
+
+    CLUSTER_ID = "{}-cluster".format(INSTANCE_ID)
+    LOCATION_ID = "us-central1-c"
+    SERVE_NODES = 3
+    STORAGE_TYPE = enums.StorageType.HDD
+
+    instance = _make_instance(INSTANCE_ID, None)
+
+    cluster = instance.cluster(
+        CLUSTER_ID,
+        location_id=LOCATION_ID,
+        serve_nodes=SERVE_NODES,
+        default_storage_type=STORAGE_TYPE,
+    )
+    assert isinstance(cluster, Cluster)
+    assert cluster.cluster_id == CLUSTER_ID
+    assert cluster.location_id == LOCATION_ID
+    assert cluster._state is None
+    assert cluster.serve_nodes == SERVE_NODES
+    assert cluster.default_storage_type == STORAGE_TYPE
+
+
+def test_instance_list_clusters():
+    from google.cloud.bigtable_admin_v2.types import (
+        bigtable_instance_admin as messages_v2_pb2,
+    )
+    from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2
+    from google.cloud.bigtable.instance import Instance
+    from google.cloud.bigtable.instance import Cluster
+
+    credentials = _make_credentials()
+    client = _make_client(project=PROJECT, credentials=credentials, admin=True)
+    instance = Instance(INSTANCE_ID, client)
+
+    failed_location = "FAILED"
+    cluster_id1 = "cluster-id1"
+    cluster_id2 = "cluster-id2"
+    cluster_path_template = "projects/{}/instances/{}/clusters/{}"
+    cluster_name1 = cluster_path_template.format(PROJECT, INSTANCE_ID, cluster_id1)
+    cluster_name2 = cluster_path_template.format(PROJECT, INSTANCE_ID, cluster_id2)
+    response_pb = messages_v2_pb2.ListClustersResponse(
+        failed_locations=[failed_location],
+        clusters=[
+            data_v2_pb2.Cluster(name=cluster_name1),
+            data_v2_pb2.Cluster(name=cluster_name2),
+        ],
+    )
+    api = client._instance_admin_client = _make_instance_admin_api()
+    api.list_clusters.side_effect = [response_pb]
+    api.cluster_path = cluster_path_template.format
+
+    # Perform the method and check the result.
+ clusters, failed_locations = instance.list_clusters() + + cluster_1, cluster_2 = clusters + + assert isinstance(cluster_1, Cluster) + assert cluster_1.name == cluster_name1 + + assert isinstance(cluster_2, Cluster) + assert cluster_2.name == cluster_name2 + + assert failed_locations == [failed_location] + + +def test_instance_table_factory(): + from google.cloud.bigtable.table import Table + + app_profile_id = "appProfileId1262094415" + instance = _make_instance(INSTANCE_ID, None) + + table = instance.table(TABLE_ID, app_profile_id=app_profile_id) + assert isinstance(table, Table) + assert table.table_id == TABLE_ID + assert table._instance == instance + assert table._app_profile_id == app_profile_id + + +def _list_tables_helper(table_name=None): + from google.cloud.bigtable_admin_v2.types import table as table_data_v2_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_messages_v1_pb2, + ) + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BigtableTableAdminClient, + ) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = _make_instance(INSTANCE_ID, client) + + instance_api = client._instance_admin_client = _make_instance_admin_api() + instance_api.instance_path.return_value = "projects/project/instances/instance-id" + table_api = client._table_admin_client = mock.create_autospec( + BigtableTableAdminClient + ) + if table_name is None: + table_name = TABLE_NAME + + response_pb = table_messages_v1_pb2.ListTablesResponse( + tables=[table_data_v2_pb2.Table(name=table_name)] + ) + + table_api.list_tables.side_effect = [response_pb] + + result = instance.list_tables() + + expected_table = instance.table(TABLE_ID) + assert result == [expected_table] + + +def test_instance_list_tables(): + _list_tables_helper() + + +def test_instance_list_tables_failure_bad_split(): + with pytest.raises(ValueError): + _list_tables_helper(table_name="wrong-format") + + +def test_instance_list_tables_failure_name_bad_before(): + BAD_TABLE_NAME = ( + "nonempty-section-before" + + "projects/" + + PROJECT + + "/instances/" + + INSTANCE_ID + + "/tables/" + + TABLE_ID + ) + with pytest.raises(ValueError): + _list_tables_helper(table_name=BAD_TABLE_NAME) + + +def test_instance_app_profile_factory(): + from google.cloud.bigtable.enums import RoutingPolicyType + + instance = _make_instance(INSTANCE_ID, None) + + app_profile1 = instance.app_profile( + APP_PROFILE_ID_1, + routing_policy_type=RoutingPolicyType.ANY, + description=DESCRIPTION_1, + ) + + app_profile2 = instance.app_profile( + APP_PROFILE_ID_2, + routing_policy_type=RoutingPolicyType.SINGLE, + description=DESCRIPTION_2, + cluster_id=CLUSTER_ID, + allow_transactional_writes=ALLOW_WRITES, + ) + assert app_profile1.app_profile_id == APP_PROFILE_ID_1 + assert app_profile1._instance is instance + assert app_profile1.routing_policy_type == RoutingPolicyType.ANY + assert app_profile1.description == DESCRIPTION_1 + assert app_profile2.app_profile_id == APP_PROFILE_ID_2 + assert app_profile2._instance is instance + assert app_profile2.routing_policy_type == RoutingPolicyType.SINGLE + assert app_profile2.description == DESCRIPTION_2 + assert app_profile2.cluster_id == CLUSTER_ID + assert app_profile2.allow_transactional_writes == ALLOW_WRITES + + +def test_instance_list_app_profiles(): + from google.api_core.page_iterator import Iterator + from google.api_core.page_iterator import Page + from google.cloud.bigtable_admin_v2.types 
import instance as data_v2_pb2 + from google.cloud.bigtable.app_profile import AppProfile + + class _Iterator(Iterator): + def __init__(self, pages): + super(_Iterator, self).__init__(client=None) + self._pages = pages + + def _next_page(self): + if self._pages: + page, self._pages = self._pages[0], self._pages[1:] + return Page(self, page, self.item_to_value) + + credentials = _make_credentials() + client = _make_client(project=PROJECT, credentials=credentials, admin=True) + instance = _make_instance(INSTANCE_ID, client) + + # Setup Expected Response + app_profile_path_template = "projects/{}/instances/{}/appProfiles/{}" + app_profile_id1 = "app-profile-id1" + app_profile_id2 = "app-profile-id2" + app_profile_name1 = app_profile_path_template.format( + PROJECT, INSTANCE_ID, app_profile_id1 + ) + app_profile_name2 = app_profile_path_template.format( + PROJECT, INSTANCE_ID, app_profile_id2 + ) + routing_policy = data_v2_pb2.AppProfile.MultiClusterRoutingUseAny() + + app_profiles = [ + data_v2_pb2.AppProfile( + name=app_profile_name1, multi_cluster_routing_use_any=routing_policy + ), + data_v2_pb2.AppProfile( + name=app_profile_name2, multi_cluster_routing_use_any=routing_policy + ), + ] + iterator = _Iterator(pages=[app_profiles]) + + # Patch the stub used by the API method. + api = _make_instance_admin_api() + client._instance_admin_client = api + api.app_profile_path = app_profile_path_template.format + api.list_app_profiles.return_value = iterator + + # Perform the method and check the result. + app_profiles = instance.list_app_profiles() + + app_profile_1, app_profile_2 = app_profiles + + assert isinstance(app_profile_1, AppProfile) + assert app_profile_1.name == app_profile_name1 + + assert isinstance(app_profile_2, AppProfile) + assert app_profile_2.name == app_profile_name2 From db5fac9c6fd2265eb22eaadfbdffe4b59adf5eda Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 17:27:54 -0400 Subject: [PATCH 10/16] tests: refactor 'policy' unit tests w/ pytest idioms --- tests/unit/test_policy.py | 522 +++++++++++++++++++------------------- 1 file changed, 263 insertions(+), 259 deletions(-) diff --git a/tests/unit/test_policy.py b/tests/unit/test_policy.py index 63f9ba03f..1b1adbed5 100644 --- a/tests/unit/test_policy.py +++ b/tests/unit/test_policy.py @@ -12,263 +12,267 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import unittest - - -class TestPolicy(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.policy import Policy - - return Policy - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - empty = frozenset() - policy = self._make_one() - self.assertIsNone(policy.etag) - self.assertIsNone(policy.version) - self.assertEqual(policy.bigtable_admins, empty) - self.assertEqual(policy.bigtable_readers, empty) - self.assertEqual(policy.bigtable_users, empty) - self.assertEqual(policy.bigtable_viewers, empty) - self.assertEqual(len(policy), 0) - self.assertEqual(dict(policy), {}) - - def test_ctor_explicit(self): - VERSION = 1 - ETAG = b"ETAG" - empty = frozenset() - policy = self._make_one(ETAG, VERSION) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.bigtable_admins, empty) - self.assertEqual(policy.bigtable_readers, empty) - self.assertEqual(policy.bigtable_users, empty) - self.assertEqual(policy.bigtable_viewers, empty) - self.assertEqual(len(policy), 0) - self.assertEqual(dict(policy), {}) - - def test_bigtable_admins_getter(self): - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - MEMBER = "user:phred@example.com" - expected = frozenset([MEMBER]) - policy = self._make_one() - policy[BIGTABLE_ADMIN_ROLE] = [MEMBER] - self.assertEqual(policy.bigtable_admins, expected) - - def test_bigtable_readers_getter(self): - from google.cloud.bigtable.policy import BIGTABLE_READER_ROLE - - MEMBER = "user:phred@example.com" - expected = frozenset([MEMBER]) - policy = self._make_one() - policy[BIGTABLE_READER_ROLE] = [MEMBER] - self.assertEqual(policy.bigtable_readers, expected) - - def test_bigtable_users_getter(self): - from google.cloud.bigtable.policy import BIGTABLE_USER_ROLE - - MEMBER = "user:phred@example.com" - expected = frozenset([MEMBER]) - policy = self._make_one() - policy[BIGTABLE_USER_ROLE] = [MEMBER] - self.assertEqual(policy.bigtable_users, expected) - - def test_bigtable_viewers_getter(self): - from google.cloud.bigtable.policy import BIGTABLE_VIEWER_ROLE - - MEMBER = "user:phred@example.com" - expected = frozenset([MEMBER]) - policy = self._make_one() - policy[BIGTABLE_VIEWER_ROLE] = [MEMBER] - self.assertEqual(policy.bigtable_viewers, expected) - - def test_from_pb_empty(self): - from google.iam.v1 import policy_pb2 - - empty = frozenset() - message = policy_pb2.Policy() - klass = self._get_target_class() - policy = klass.from_pb(message) - self.assertEqual(policy.etag, b"") - self.assertEqual(policy.version, 0) - self.assertEqual(policy.bigtable_admins, empty) - self.assertEqual(policy.bigtable_readers, empty) - self.assertEqual(policy.bigtable_users, empty) - self.assertEqual(policy.bigtable_viewers, empty) - self.assertEqual(len(policy), 0) - self.assertEqual(dict(policy), {}) - - def test_from_pb_non_empty(self): - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - ETAG = b"ETAG" - VERSION = 1 - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - empty = frozenset() - message = policy_pb2.Policy( - etag=ETAG, - version=VERSION, - bindings=[{"role": BIGTABLE_ADMIN_ROLE, "members": members}], - ) - klass = self._get_target_class() - policy = klass.from_pb(message) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.bigtable_admins, set(members)) - 
self.assertEqual(policy.bigtable_readers, empty) - self.assertEqual(policy.bigtable_users, empty) - self.assertEqual(policy.bigtable_viewers, empty) - self.assertEqual(len(policy), 1) - self.assertEqual(dict(policy), {BIGTABLE_ADMIN_ROLE: set(members)}) - - def test_from_pb_with_condition(self): - import pytest - from google.iam.v1 import policy_pb2 - from google.api_core.iam import InvalidOperationException, _DICT_ACCESS_MSG - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - ETAG = b"ETAG" - VERSION = 3 - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - BINDINGS = [ - { - "role": BIGTABLE_ADMIN_ROLE, - "members": members, - "condition": { - "title": "request_time", - "description": "Requests made before 2021-01-01T00:00:00Z", - "expression": 'request.time < timestamp("2021-01-01T00:00:00Z")', - }, - } - ] - message = policy_pb2.Policy(etag=ETAG, version=VERSION, bindings=BINDINGS,) - klass = self._get_target_class() - policy = klass.from_pb(message) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.bindings[0]["role"], BIGTABLE_ADMIN_ROLE) - self.assertEqual(policy.bindings[0]["members"], set(members)) - self.assertEqual(policy.bindings[0]["condition"], BINDINGS[0]["condition"]) - with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): - policy.bigtable_admins - with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): - policy.bigtable_readers - with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): - policy.bigtable_users - with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): - policy.bigtable_viewers - with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): - len(policy) - - def test_to_pb_empty(self): - from google.iam.v1 import policy_pb2 - - policy = self._make_one() - expected = policy_pb2.Policy() - - self.assertEqual(policy.to_pb(), expected) - - def test_to_pb_explicit(self): - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - VERSION = 1 - ETAG = b"ETAG" - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - policy = self._make_one(ETAG, VERSION) - policy[BIGTABLE_ADMIN_ROLE] = members - expected = policy_pb2.Policy( - etag=ETAG, - version=VERSION, - bindings=[ - policy_pb2.Binding(role=BIGTABLE_ADMIN_ROLE, members=sorted(members)) - ], - ) - - self.assertEqual(policy.to_pb(), expected) - - def test_to_pb_with_condition(self): - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - - VERSION = 3 - ETAG = b"ETAG" - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - condition = { - "title": "request_time", - "description": "Requests made before 2021-01-01T00:00:00Z", - "expression": 'request.time < timestamp("2021-01-01T00:00:00Z")', + +def _make_policy(*args, **kw): + from google.cloud.bigtable.policy import Policy + + return Policy(*args, **kw) + + +def test_policy_ctor_defaults(): + empty = frozenset() + policy = _make_policy() + assert policy.etag is None + assert policy.version is None + assert policy.bigtable_admins == empty + assert policy.bigtable_readers == empty + assert policy.bigtable_users == empty + assert policy.bigtable_viewers == empty + assert len(policy) == 0 + assert dict(policy) == {} + + +def test_policy_ctor_explicit(): + VERSION = 1 + ETAG = b"ETAG" + empty = frozenset() + policy = _make_policy(ETAG, VERSION) + assert policy.etag == 
ETAG + assert policy.version == VERSION + assert policy.bigtable_admins == empty + assert policy.bigtable_readers == empty + assert policy.bigtable_users == empty + assert policy.bigtable_viewers == empty + assert len(policy) == 0 + assert dict(policy) == {} + + +def test_policy_bigtable_admins(): + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + + MEMBER = "user:phred@example.com" + expected = frozenset([MEMBER]) + policy = _make_policy() + policy[BIGTABLE_ADMIN_ROLE] = [MEMBER] + assert policy.bigtable_admins == expected + + +def test_policy_bigtable_readers(): + from google.cloud.bigtable.policy import BIGTABLE_READER_ROLE + + MEMBER = "user:phred@example.com" + expected = frozenset([MEMBER]) + policy = _make_policy() + policy[BIGTABLE_READER_ROLE] = [MEMBER] + assert policy.bigtable_readers == expected + + +def test_policy_bigtable_users(): + from google.cloud.bigtable.policy import BIGTABLE_USER_ROLE + + MEMBER = "user:phred@example.com" + expected = frozenset([MEMBER]) + policy = _make_policy() + policy[BIGTABLE_USER_ROLE] = [MEMBER] + assert policy.bigtable_users == expected + + +def test_policy_bigtable_viewers(): + from google.cloud.bigtable.policy import BIGTABLE_VIEWER_ROLE + + MEMBER = "user:phred@example.com" + expected = frozenset([MEMBER]) + policy = _make_policy() + policy[BIGTABLE_VIEWER_ROLE] = [MEMBER] + assert policy.bigtable_viewers == expected + + +def test_policy_from_pb_w_empty(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import Policy + + empty = frozenset() + message = policy_pb2.Policy() + policy = Policy.from_pb(message) + assert policy.etag == b"" + assert policy.version == 0 + assert policy.bigtable_admins == empty + assert policy.bigtable_readers == empty + assert policy.bigtable_users == empty + assert policy.bigtable_viewers == empty + assert len(policy) == 0 + assert dict(policy) == {} + + +def test_policy_from_pb_w_non_empty(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + from google.cloud.bigtable.policy import Policy + + ETAG = b"ETAG" + VERSION = 1 + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + empty = frozenset() + message = policy_pb2.Policy( + etag=ETAG, + version=VERSION, + bindings=[{"role": BIGTABLE_ADMIN_ROLE, "members": members}], + ) + policy = Policy.from_pb(message) + assert policy.etag == ETAG + assert policy.version == VERSION + assert policy.bigtable_admins == set(members) + assert policy.bigtable_readers == empty + assert policy.bigtable_users == empty + assert policy.bigtable_viewers == empty + assert len(policy) == 1 + assert dict(policy) == {BIGTABLE_ADMIN_ROLE: set(members)} + + +def test_policy_from_pb_w_condition(): + import pytest + from google.iam.v1 import policy_pb2 + from google.api_core.iam import InvalidOperationException, _DICT_ACCESS_MSG + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + from google.cloud.bigtable.policy import Policy + + ETAG = b"ETAG" + VERSION = 3 + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + BINDINGS = [ + { + "role": BIGTABLE_ADMIN_ROLE, + "members": members, + "condition": { + "title": "request_time", + "description": "Requests made before 2021-01-01T00:00:00Z", + "expression": 'request.time < timestamp("2021-01-01T00:00:00Z")', + }, } - policy = self._make_one(ETAG, VERSION) - policy.bindings = [ - { - "role": BIGTABLE_ADMIN_ROLE, - "members": set(members), - "condition": condition, - } - ] - expected = 
policy_pb2.Policy( - etag=ETAG, - version=VERSION, - bindings=[ - policy_pb2.Binding( - role=BIGTABLE_ADMIN_ROLE, - members=sorted(members), - condition=condition, - ) - ], - ) - - self.assertEqual(policy.to_pb(), expected) - - def test_from_api_repr_wo_etag(self): - VERSION = 1 - empty = frozenset() - resource = {"version": VERSION} - klass = self._get_target_class() - policy = klass.from_api_repr(resource) - self.assertIsNone(policy.etag) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.bigtable_admins, empty) - self.assertEqual(policy.bigtable_readers, empty) - self.assertEqual(policy.bigtable_users, empty) - self.assertEqual(policy.bigtable_viewers, empty) - self.assertEqual(len(policy), 0) - self.assertEqual(dict(policy), {}) - - def test_from_api_repr_w_etag(self): - import base64 - - ETAG = b"ETAG" - empty = frozenset() - resource = {"etag": base64.b64encode(ETAG).decode("ascii")} - klass = self._get_target_class() - policy = klass.from_api_repr(resource) - self.assertEqual(policy.etag, ETAG) - self.assertIsNone(policy.version) - self.assertEqual(policy.bigtable_admins, empty) - self.assertEqual(policy.bigtable_readers, empty) - self.assertEqual(policy.bigtable_users, empty) - self.assertEqual(policy.bigtable_viewers, empty) - self.assertEqual(len(policy), 0) - self.assertEqual(dict(policy), {}) - - def test_to_api_repr_wo_etag(self): - VERSION = 1 - resource = {"version": VERSION} - policy = self._make_one(version=VERSION) - self.assertEqual(policy.to_api_repr(), resource) - - def test_to_api_repr_w_etag(self): - import base64 - - ETAG = b"ETAG" - policy = self._make_one(etag=ETAG) - resource = {"etag": base64.b64encode(ETAG).decode("ascii")} - self.assertEqual(policy.to_api_repr(), resource) + ] + message = policy_pb2.Policy(etag=ETAG, version=VERSION, bindings=BINDINGS,) + policy = Policy.from_pb(message) + assert policy.etag == ETAG + assert policy.version == VERSION + assert policy.bindings[0]["role"] == BIGTABLE_ADMIN_ROLE + assert policy.bindings[0]["members"] == set(members) + assert policy.bindings[0]["condition"] == BINDINGS[0]["condition"] + with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): + policy.bigtable_admins + with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): + policy.bigtable_readers + with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): + policy.bigtable_users + with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): + policy.bigtable_viewers + with pytest.raises(InvalidOperationException, match=_DICT_ACCESS_MSG): + len(policy) + + +def test_policy_to_pb_empty(): + from google.iam.v1 import policy_pb2 + + policy = _make_policy() + expected = policy_pb2.Policy() + + assert policy.to_pb() == expected + + +def test_policy_to_pb_explicit(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + + VERSION = 1 + ETAG = b"ETAG" + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + policy = _make_policy(ETAG, VERSION) + policy[BIGTABLE_ADMIN_ROLE] = members + expected = policy_pb2.Policy( + etag=ETAG, + version=VERSION, + bindings=[ + policy_pb2.Binding(role=BIGTABLE_ADMIN_ROLE, members=sorted(members)) + ], + ) + + assert policy.to_pb() == expected + + +def test_policy_to_pb_w_condition(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + + VERSION = 3 + ETAG = b"ETAG" + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + 
condition = { + "title": "request_time", + "description": "Requests made before 2021-01-01T00:00:00Z", + "expression": 'request.time < timestamp("2021-01-01T00:00:00Z")', + } + policy = _make_policy(ETAG, VERSION) + policy.bindings = [ + {"role": BIGTABLE_ADMIN_ROLE, "members": set(members), "condition": condition} + ] + expected = policy_pb2.Policy( + etag=ETAG, + version=VERSION, + bindings=[ + policy_pb2.Binding( + role=BIGTABLE_ADMIN_ROLE, members=sorted(members), condition=condition, + ) + ], + ) + + assert policy.to_pb() == expected + + +def test_policy_from_api_repr_wo_etag(): + from google.cloud.bigtable.policy import Policy + + VERSION = 1 + empty = frozenset() + resource = {"version": VERSION} + policy = Policy.from_api_repr(resource) + assert policy.etag is None + assert policy.version == VERSION + assert policy.bigtable_admins == empty + assert policy.bigtable_readers == empty + assert policy.bigtable_users == empty + assert policy.bigtable_viewers == empty + assert len(policy) == 0 + assert dict(policy) == {} + + +def test_policy_from_api_repr_w_etag(): + import base64 + from google.cloud.bigtable.policy import Policy + + ETAG = b"ETAG" + empty = frozenset() + resource = {"etag": base64.b64encode(ETAG).decode("ascii")} + policy = Policy.from_api_repr(resource) + assert policy.etag == ETAG + assert policy.version is None + assert policy.bigtable_admins == empty + assert policy.bigtable_readers == empty + assert policy.bigtable_users == empty + assert policy.bigtable_viewers == empty + assert len(policy) == 0 + assert dict(policy) == {} + + +def test_policy_to_api_repr_wo_etag(): + VERSION = 1 + resource = {"version": VERSION} + policy = _make_policy(version=VERSION) + assert policy.to_api_repr() == resource + + +def test_policy_to_api_repr_w_etag(): + import base64 + + ETAG = b"ETAG" + policy = _make_policy(etag=ETAG) + resource = {"etag": base64.b64encode(ETAG).decode("ascii")} + assert policy.to_api_repr() == resource From 19b091ba69eee7b5c4d31056760830da0d979203 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 17:47:09 -0400 Subject: [PATCH 11/16] tests: refactor 'row' unit tests w/ pytest idioms --- tests/unit/test_row.py | 1309 +++++++++++++++++++--------------------- 1 file changed, 636 insertions(+), 673 deletions(-) diff --git a/tests/unit/test_row.py b/tests/unit/test_row.py index 1f33f214b..774756314 100644 --- a/tests/unit/test_row.py +++ b/tests/unit/test_row.py @@ -13,763 +13,726 @@ # limitations under the License. 
-import unittest
-
 import mock
+import pytest
 
 from ._testing import _make_credentials
 
 
-class TestRow(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.row import Row
+def _make_client(*args, **kwargs):
+    from google.cloud.bigtable.client import Client
 
-        return Row
+    return Client(*args, **kwargs)
 
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
 
-    def test_row_key_getter(self):
-        row = self._make_one(row_key=b"row_key", table="table")
-        self.assertEqual(b"row_key", row.row_key)
+def _make_row(*args, **kwargs):
+    from google.cloud.bigtable.row import Row
 
-    def test_row_table_getter(self):
-        row = self._make_one(row_key=b"row_key", table="table")
-        self.assertEqual("table", row.table)
+    return Row(*args, **kwargs)
 
 
-class Test_SetDeleteRow(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.row import _SetDeleteRow
+def test_row_key_getter():
+    row = _make_row(row_key=b"row_key", table="table")
+    assert b"row_key" == row.row_key
 
-        return _SetDeleteRow
 
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
+def test_row_table_getter():
+    row = _make_row(row_key=b"row_key", table="table")
+    assert "table" == row.table
 
-    def test__get_mutations_virtual(self):
-        row = self._make_one(b"row-key", None)
-        with self.assertRaises(NotImplementedError):
-            row._get_mutations(None)
 
+def _make__set_delete_row(*args, **kwargs):
+    from google.cloud.bigtable.row import _SetDeleteRow
 
-class TestDirectRow(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.row import DirectRow
+    return _SetDeleteRow(*args, **kwargs)
 
-        return DirectRow
 
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
+def test__set_delete_row__get_mutations_virtual():
+    row = _make__set_delete_row(b"row-key", None)
+    with pytest.raises(NotImplementedError):
+        row._get_mutations(None)
 
-    @staticmethod
-    def _get_target_client_class():
-        from google.cloud.bigtable.client import Client
 
-        return Client
+def _make_direct_row(*args, **kwargs):
+    from google.cloud.bigtable.row import DirectRow
 
-    def _make_client(self, *args, **kwargs):
-        return self._get_target_client_class()(*args, **kwargs)
+    return DirectRow(*args, **kwargs)
 
-    def test_constructor(self):
-        row_key = b"row_key"
-        table = object()
 
-        row = self._make_one(row_key, table)
-        self.assertEqual(row._row_key, row_key)
-        self.assertIs(row._table, table)
-        self.assertEqual(row._pb_mutations, [])
+def test_direct_row_constructor():
+    row_key = b"row_key"
+    table = object()
 
-    def test_constructor_with_unicode(self):
-        row_key = u"row_key"
-        row_key_bytes = b"row_key"
-        table = object()
+    row = _make_direct_row(row_key, table)
+    assert row._row_key == row_key
+    assert row._table is table
+    assert row._pb_mutations == []
 
-        row = self._make_one(row_key, table)
-        self.assertEqual(row._row_key, row_key_bytes)
-        self.assertIs(row._table, table)
 
-    def test_constructor_with_non_bytes(self):
-        row_key = object()
-        with self.assertRaises(TypeError):
-            self._make_one(row_key, None)
+def test_direct_row_constructor_with_unicode():
+    row_key = u"row_key"
+    row_key_bytes = b"row_key"
+    table = object()
 
-    def test__get_mutations(self):
-        row_key = b"row_key"
-        row = self._make_one(row_key, None)
+    row = _make_direct_row(row_key, table)
+    assert row._row_key == row_key_bytes
+    assert row._table is table
 
-        row._pb_mutations = mutations = object()
-        self.assertIs(mutations, 
row._get_mutations(None)) - def test_get_mutations_size(self): - row_key = b"row_key" - row = self._make_one(row_key, None) +def test_direct_row_constructor_with_non_bytes(): + row_key = object() + with pytest.raises(TypeError): + _make_direct_row(row_key, None) - column_family_id1 = u"column_family_id1" - column_family_id2 = u"column_family_id2" - column1 = b"column1" - column2 = b"column2" - number_of_bytes = 1 * 1024 * 1024 - value = b"1" * number_of_bytes - row.set_cell(column_family_id1, column1, value) - row.set_cell(column_family_id2, column2, value) +def test_direct_row__get_mutations(): + row_key = b"row_key" + row = _make_direct_row(row_key, None) - total_mutations_size = 0 - for mutation in row._get_mutations(): - total_mutations_size += mutation._pb.ByteSize() - - self.assertEqual(row.get_mutations_size(), total_mutations_size) - - def _set_cell_helper( - self, - column=None, - column_bytes=None, - value=b"foobar", - timestamp=None, - timestamp_micros=-1, - ): - import struct - - row_key = b"row_key" - column_family_id = u"column_family_id" - if column is None: - column = b"column" - table = object() - row = self._make_one(row_key, table) - self.assertEqual(row._pb_mutations, []) - row.set_cell(column_family_id, column, value, timestamp=timestamp) - - if isinstance(value, int): - value = struct.pack(">q", value) - expected_pb = _MutationPB( - set_cell=_MutationSetCellPB( - family_name=column_family_id, - column_qualifier=column_bytes or column, - timestamp_micros=timestamp_micros, - value=value, - ) - ) - self.assertEqual(row._pb_mutations, [expected_pb]) + row._pb_mutations = mutations = object() + assert mutations is row._get_mutations(None) - def test_set_cell(self): - self._set_cell_helper() - def test_set_cell_with_string_column(self): - column_bytes = b"column" - column_non_bytes = u"column" - self._set_cell_helper(column=column_non_bytes, column_bytes=column_bytes) +def test_direct_row_get_mutations_size(): + row_key = b"row_key" + row = _make_direct_row(row_key, None) - def test_set_cell_with_integer_value(self): - value = 1337 - self._set_cell_helper(value=value) + column_family_id1 = u"column_family_id1" + column_family_id2 = u"column_family_id2" + column1 = b"column1" + column2 = b"column2" + number_of_bytes = 1 * 1024 * 1024 + value = b"1" * number_of_bytes - def test_set_cell_with_non_bytes_value(self): - row_key = b"row_key" - column = b"column" - column_family_id = u"column_family_id" - table = object() - - row = self._make_one(row_key, table) - value = object() # Not bytes - with self.assertRaises(TypeError): - row.set_cell(column_family_id, column, value) - - def test_set_cell_with_non_null_timestamp(self): - import datetime - from google.cloud._helpers import _EPOCH - - microseconds = 898294371 - millis_granularity = microseconds - (microseconds % 1000) - timestamp = _EPOCH + datetime.timedelta(microseconds=microseconds) - self._set_cell_helper(timestamp=timestamp, timestamp_micros=millis_granularity) - - def test_delete(self): - row_key = b"row_key" - row = self._make_one(row_key, object()) - self.assertEqual(row._pb_mutations, []) - row.delete() - - expected_pb = _MutationPB(delete_from_row=_MutationDeleteFromRowPB()) - self.assertEqual(row._pb_mutations, [expected_pb]) - - def test_delete_cell(self): - klass = self._get_target_class() - - class MockRow(klass): - def __init__(self, *args, **kwargs): - super(MockRow, self).__init__(*args, **kwargs) - self._args = [] - self._kwargs = [] - - # Replace the called method with one that logs arguments. 
- def _delete_cells(self, *args, **kwargs): - self._args.append(args) - self._kwargs.append(kwargs) - - row_key = b"row_key" - column = b"column" - column_family_id = u"column_family_id" - table = object() - - mock_row = MockRow(row_key, table) - # Make sure no values are set before calling the method. - self.assertEqual(mock_row._pb_mutations, []) - self.assertEqual(mock_row._args, []) - self.assertEqual(mock_row._kwargs, []) - - # Actually make the request against the mock class. - time_range = object() - mock_row.delete_cell(column_family_id, column, time_range=time_range) - self.assertEqual(mock_row._pb_mutations, []) - self.assertEqual(mock_row._args, [(column_family_id, [column])]) - self.assertEqual(mock_row._kwargs, [{"state": None, "time_range": time_range}]) - - def test_delete_cells_non_iterable(self): - row_key = b"row_key" - column_family_id = u"column_family_id" - table = object() - - row = self._make_one(row_key, table) - columns = object() # Not iterable - with self.assertRaises(TypeError): - row.delete_cells(column_family_id, columns) - - def test_delete_cells_all_columns(self): - row_key = b"row_key" - column_family_id = u"column_family_id" - table = object() - - row = self._make_one(row_key, table) - klass = self._get_target_class() - self.assertEqual(row._pb_mutations, []) - row.delete_cells(column_family_id, klass.ALL_COLUMNS) - - expected_pb = _MutationPB( - delete_from_family=_MutationDeleteFromFamilyPB(family_name=column_family_id) - ) - self.assertEqual(row._pb_mutations, [expected_pb]) + row.set_cell(column_family_id1, column1, value) + row.set_cell(column_family_id2, column2, value) - def test_delete_cells_no_columns(self): - row_key = b"row_key" - column_family_id = u"column_family_id" - table = object() + total_mutations_size = 0 + for mutation in row._get_mutations(): + total_mutations_size += mutation._pb.ByteSize() + + assert row.get_mutations_size() == total_mutations_size - row = self._make_one(row_key, table) - columns = [] - self.assertEqual(row._pb_mutations, []) - row.delete_cells(column_family_id, columns) - self.assertEqual(row._pb_mutations, []) - def _delete_cells_helper(self, time_range=None): - row_key = b"row_key" +def _set_cell_helper( + column=None, + column_bytes=None, + value=b"foobar", + timestamp=None, + timestamp_micros=-1, +): + import struct + + row_key = b"row_key" + column_family_id = u"column_family_id" + if column is None: column = b"column" - column_family_id = u"column_family_id" - table = object() - - row = self._make_one(row_key, table) - columns = [column] - self.assertEqual(row._pb_mutations, []) - row.delete_cells(column_family_id, columns, time_range=time_range) - - expected_pb = _MutationPB( - delete_from_column=_MutationDeleteFromColumnPB( - family_name=column_family_id, column_qualifier=column - ) + table = object() + row = _make_direct_row(row_key, table) + assert row._pb_mutations == [] + row.set_cell(column_family_id, column, value, timestamp=timestamp) + + if isinstance(value, int): + value = struct.pack(">q", value) + expected_pb = _MutationPB( + set_cell=_MutationSetCellPB( + family_name=column_family_id, + column_qualifier=column_bytes or column, + timestamp_micros=timestamp_micros, + value=value, ) - if time_range is not None: - expected_pb.delete_from_column.time_range._pb.CopyFrom( - time_range.to_pb()._pb - ) - self.assertEqual(row._pb_mutations, [expected_pb]) - - def test_delete_cells_no_time_range(self): - self._delete_cells_helper() - - def test_delete_cells_with_time_range(self): - import datetime - from 
google.cloud._helpers import _EPOCH - from google.cloud.bigtable.row_filters import TimestampRange - - microseconds = 30871000 # Makes sure already milliseconds granularity - start = _EPOCH + datetime.timedelta(microseconds=microseconds) - time_range = TimestampRange(start=start) - self._delete_cells_helper(time_range=time_range) - - def test_delete_cells_with_bad_column(self): - # This makes sure a failure on one of the columns doesn't leave - # the row's mutations in a bad state. - row_key = b"row_key" - column = b"column" - column_family_id = u"column_family_id" - table = object() - - row = self._make_one(row_key, table) - columns = [column, object()] - self.assertEqual(row._pb_mutations, []) - with self.assertRaises(TypeError): - row.delete_cells(column_family_id, columns) - self.assertEqual(row._pb_mutations, []) - - def test_delete_cells_with_string_columns(self): - row_key = b"row_key" - column_family_id = u"column_family_id" - column1 = u"column1" - column1_bytes = b"column1" - column2 = u"column2" - column2_bytes = b"column2" - table = object() - - row = self._make_one(row_key, table) - columns = [column1, column2] - self.assertEqual(row._pb_mutations, []) - row.delete_cells(column_family_id, columns) + ) + assert row._pb_mutations == [expected_pb] - expected_pb1 = _MutationPB( - delete_from_column=_MutationDeleteFromColumnPB( - family_name=column_family_id, column_qualifier=column1_bytes - ) - ) - expected_pb2 = _MutationPB( - delete_from_column=_MutationDeleteFromColumnPB( - family_name=column_family_id, column_qualifier=column2_bytes - ) - ) - self.assertEqual(row._pb_mutations, [expected_pb1, expected_pb2]) - def test_commit(self): - project_id = "project-id" - row_key = b"row_key" - table_name = "projects/more-stuff" - column_family_id = u"column_family_id" - column = b"column" +def test_direct_row_set_cell(): + _set_cell_helper() - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client) - row = self._make_one(row_key, table) - value = b"bytes-value" - # Perform the method and check the result. - row.set_cell(column_family_id, column, value) - row.commit() - self.assertEqual(table.mutated_rows, [row]) +def test_direct_row_set_cell_with_string_column(): + column_bytes = b"column" + column_non_bytes = u"column" + _set_cell_helper(column=column_non_bytes, column_bytes=column_bytes) - def test_commit_with_exception(self): - from google.rpc import status_pb2 - project_id = "project-id" - row_key = b"row_key" - table_name = "projects/more-stuff" - column_family_id = u"column_family_id" - column = b"column" +def test_direct_row_set_cell_with_integer_value(): + value = 1337 + _set_cell_helper(value=value) - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client) - row = self._make_one(row_key, table) - value = b"bytes-value" - # Perform the method and check the result. 
+def test_direct_row_set_cell_with_non_bytes_value(): + row_key = b"row_key" + column = b"column" + column_family_id = u"column_family_id" + table = object() + + row = _make_direct_row(row_key, table) + value = object() # Not bytes + with pytest.raises(TypeError): row.set_cell(column_family_id, column, value) - result = row.commit() - expected = status_pb2.Status(code=0) - self.assertEqual(result, expected) - - -class TestConditionalRow(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row import ConditionalRow - - return ConditionalRow - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client - - return Client - - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - - def test_constructor(self): - row_key = b"row_key" - table = object() - filter_ = object() - - row = self._make_one(row_key, table, filter_=filter_) - self.assertEqual(row._row_key, row_key) - self.assertIs(row._table, table) - self.assertIs(row._filter, filter_) - self.assertEqual(row._true_pb_mutations, []) - self.assertEqual(row._false_pb_mutations, []) - - def test__get_mutations(self): - row_key = b"row_key" - filter_ = object() - row = self._make_one(row_key, None, filter_=filter_) - - row._true_pb_mutations = true_mutations = object() - row._false_pb_mutations = false_mutations = object() - self.assertIs(true_mutations, row._get_mutations(True)) - self.assertIs(false_mutations, row._get_mutations(False)) - self.assertIs(false_mutations, row._get_mutations(None)) - - def test_commit(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - project_id = "project-id" - row_key = b"row_key" - table_name = "projects/more-stuff" - app_profile_id = "app_profile_id" - column_family_id1 = u"column_family_id1" - column_family_id2 = u"column_family_id2" - column_family_id3 = u"column_family_id3" - column1 = b"column1" - column2 = b"column2" - - api = mock.create_autospec(BigtableClient) - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client, app_profile_id=app_profile_id) - row_filter = RowSampleFilter(0.33) - row = self._make_one(row_key, table, filter_=row_filter) - # Create request_pb - value1 = b"bytes-value" - # Create response_pb - predicate_matched = True - response_pb = _CheckAndMutateRowResponsePB(predicate_matched=predicate_matched) +def test_direct_row_set_cell_with_non_null_timestamp(): + import datetime + from google.cloud._helpers import _EPOCH - # Patch the stub used by the API method. - api.check_and_mutate_row.side_effect = [response_pb] - client._table_data_client = api + microseconds = 898294371 + millis_granularity = microseconds - (microseconds % 1000) + timestamp = _EPOCH + datetime.timedelta(microseconds=microseconds) + _set_cell_helper(timestamp=timestamp, timestamp_micros=millis_granularity) - # Create expected_result. - expected_result = predicate_matched - # Perform the method and check the result. 
- row.set_cell(column_family_id1, column1, value1, state=True) - row.delete(state=False) - row.delete_cell(column_family_id2, column2, state=True) - row.delete_cells(column_family_id3, row.ALL_COLUMNS, state=True) - result = row.commit() - call_args = api.check_and_mutate_row.call_args - self.assertEqual(app_profile_id, call_args.app_profile_id[0]) - self.assertEqual(result, expected_result) - self.assertEqual(row._true_pb_mutations, []) - self.assertEqual(row._false_pb_mutations, []) - - def test_commit_too_many_mutations(self): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import row as MUT - - row_key = b"row_key" - table = object() - filter_ = object() - row = self._make_one(row_key, table, filter_=filter_) - row._true_pb_mutations = [1, 2, 3] - num_mutations = len(row._true_pb_mutations) - with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): - with self.assertRaises(ValueError): - row.commit() - - def test_commit_no_mutations(self): - from tests.unit._testing import _FakeStub - - project_id = "project-id" - row_key = b"row_key" - - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(None, client=client) - filter_ = object() - row = self._make_one(row_key, table, filter_=filter_) - self.assertEqual(row._true_pb_mutations, []) - self.assertEqual(row._false_pb_mutations, []) +def test_direct_row_delete(): + row_key = b"row_key" + row = _make_direct_row(row_key, object()) + assert row._pb_mutations == [] + row.delete() - # Patch the stub used by the API method. - stub = _FakeStub() + expected_pb = _MutationPB(delete_from_row=_MutationDeleteFromRowPB()) + assert row._pb_mutations == [expected_pb] - # Perform the method and check the result. - result = row.commit() - self.assertIsNone(result) - # Make sure no request was sent. - self.assertEqual(stub.method_calls, []) +def test_direct_row_delete_cell(): + from google.cloud.bigtable.row import DirectRow -class TestAppendRow(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row import AppendRow + class MockRow(DirectRow): + def __init__(self, *args, **kwargs): + super(MockRow, self).__init__(*args, **kwargs) + self._args = [] + self._kwargs = [] - return AppendRow + # Replace the called method with one that logs arguments. + def _delete_cells(self, *args, **kwargs): + self._args.append(args) + self._kwargs.append(kwargs) - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) + row_key = b"row_key" + column = b"column" + column_family_id = u"column_family_id" + table = object() - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client + mock_row = MockRow(row_key, table) + # Make sure no values are set before calling the method. + assert mock_row._pb_mutations == [] + assert mock_row._args == [] + assert mock_row._kwargs == [] - return Client + # Actually make the request against the mock class. 
+ time_range = object() + mock_row.delete_cell(column_family_id, column, time_range=time_range) + assert mock_row._pb_mutations == [] + assert mock_row._args == [(column_family_id, [column])] + assert mock_row._kwargs == [{"state": None, "time_range": time_range}] - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - def test_constructor(self): - row_key = b"row_key" - table = object() +def test_direct_row_delete_cells_non_iterable(): + row_key = b"row_key" + column_family_id = u"column_family_id" + table = object() - row = self._make_one(row_key, table) - self.assertEqual(row._row_key, row_key) - self.assertIs(row._table, table) - self.assertEqual(row._rule_pb_list, []) + row = _make_direct_row(row_key, table) + columns = object() # Not iterable + with pytest.raises(TypeError): + row.delete_cells(column_family_id, columns) - def test_clear(self): - row_key = b"row_key" - table = object() - row = self._make_one(row_key, table) - row._rule_pb_list = [1, 2, 3] - row.clear() - self.assertEqual(row._rule_pb_list, []) - def test_append_cell_value(self): - table = object() - row_key = b"row_key" - row = self._make_one(row_key, table) - self.assertEqual(row._rule_pb_list, []) +def test_direct_row_delete_cells_all_columns(): + from google.cloud.bigtable.row import DirectRow - column = b"column" - column_family_id = u"column_family_id" - value = b"bytes-val" - row.append_cell_value(column_family_id, column, value) - expected_pb = _ReadModifyWriteRulePB( - family_name=column_family_id, column_qualifier=column, append_value=value - ) - self.assertEqual(row._rule_pb_list, [expected_pb]) + row_key = b"row_key" + column_family_id = u"column_family_id" + table = object() - def test_increment_cell_value(self): - table = object() - row_key = b"row_key" - row = self._make_one(row_key, table) - self.assertEqual(row._rule_pb_list, []) + row = _make_direct_row(row_key, table) + assert row._pb_mutations == [] + row.delete_cells(column_family_id, DirectRow.ALL_COLUMNS) - column = b"column" - column_family_id = u"column_family_id" - int_value = 281330 - row.increment_cell_value(column_family_id, column, int_value) - expected_pb = _ReadModifyWriteRulePB( - family_name=column_family_id, - column_qualifier=column, - increment_amount=int_value, - ) - self.assertEqual(row._rule_pb_list, [expected_pb]) - - def test_commit(self): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import row as MUT - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - project_id = "project-id" - row_key = b"row_key" - table_name = "projects/more-stuff" - app_profile_id = "app_profile_id" - column_family_id = u"column_family_id" - column = b"column" + expected_pb = _MutationPB( + delete_from_family=_MutationDeleteFromFamilyPB(family_name=column_family_id) + ) + assert row._pb_mutations == [expected_pb] - api = mock.create_autospec(BigtableClient) - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(table_name, client=client, app_profile_id=app_profile_id) - row = self._make_one(row_key, table) - - # Create request_pb - value = b"bytes-value" - - # Create expected_result. - row_responses = [] - expected_result = object() - - # Patch API calls - client._table_data_client = api - - def mock_parse_rmw_row_response(row_response): - row_responses.append(row_response) - return expected_result - - # Perform the method and check the result. 
- with _Monkey(MUT, _parse_rmw_row_response=mock_parse_rmw_row_response): - row._table._instance._client._table_data_client = api - row.append_cell_value(column_family_id, column, value) - result = row.commit() - call_args = api.read_modify_write_row.call_args_list[0] - self.assertEqual(app_profile_id, call_args.app_profile_id[0]) - self.assertEqual(result, expected_result) - self.assertEqual(row._rule_pb_list, []) - - def test_commit_no_rules(self): - from tests.unit._testing import _FakeStub - - project_id = "project-id" - row_key = b"row_key" - - credentials = _make_credentials() - client = self._make_client( - project=project_id, credentials=credentials, admin=True - ) - table = _Table(None, client=client) - row = self._make_one(row_key, table) - self.assertEqual(row._rule_pb_list, []) +def test_direct_row_delete_cells_no_columns(): + row_key = b"row_key" + column_family_id = u"column_family_id" + table = object() - # Patch the stub used by the API method. - stub = _FakeStub() + row = _make_direct_row(row_key, table) + columns = [] + assert row._pb_mutations == [] + row.delete_cells(column_family_id, columns) + assert row._pb_mutations == [] - # Perform the method and check the result. - result = row.commit() - self.assertEqual(result, {}) - # Make sure no request was sent. - self.assertEqual(stub.method_calls, []) - - def test_commit_too_many_mutations(self): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import row as MUT - - row_key = b"row_key" - table = object() - row = self._make_one(row_key, table) - row._rule_pb_list = [1, 2, 3] - num_mutations = len(row._rule_pb_list) - with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): - with self.assertRaises(ValueError): - row.commit() - - -class Test__parse_rmw_row_response(unittest.TestCase): - def _call_fut(self, row_response): - from google.cloud.bigtable.row import _parse_rmw_row_response - - return _parse_rmw_row_response(row_response) - - def test_it(self): - from google.cloud._helpers import _datetime_from_microseconds - - col_fam1 = u"col-fam-id" - col_fam2 = u"col-fam-id2" - col_name1 = b"col-name1" - col_name2 = b"col-name2" - col_name3 = b"col-name3-but-other-fam" - cell_val1 = b"cell-val" - cell_val2 = b"cell-val-newer" - cell_val3 = b"altcol-cell-val" - cell_val4 = b"foo" - - microseconds = 1000871 - timestamp = _datetime_from_microseconds(microseconds) - expected_output = { - col_fam1: { - col_name1: [(cell_val1, timestamp), (cell_val2, timestamp)], - col_name2: [(cell_val3, timestamp)], - }, - col_fam2: {col_name3: [(cell_val4, timestamp)]}, - } - response_row = _RowPB( - families=[ - _FamilyPB( - name=col_fam1, - columns=[ - _ColumnPB( - qualifier=col_name1, - cells=[ - _CellPB(value=cell_val1, timestamp_micros=microseconds), - _CellPB(value=cell_val2, timestamp_micros=microseconds), - ], - ), - _ColumnPB( - qualifier=col_name2, - cells=[ - _CellPB(value=cell_val3, timestamp_micros=microseconds) - ], - ), - ], - ), - _FamilyPB( - name=col_fam2, - columns=[ - _ColumnPB( - qualifier=col_name3, - cells=[ - _CellPB(value=cell_val4, timestamp_micros=microseconds) - ], - ) - ], - ), - ] + +def _delete_cells_helper(time_range=None): + row_key = b"row_key" + column = b"column" + column_family_id = u"column_family_id" + table = object() + + row = _make_direct_row(row_key, table) + columns = [column] + assert row._pb_mutations == [] + row.delete_cells(column_family_id, columns, time_range=time_range) + + expected_pb = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + 
family_name=column_family_id, column_qualifier=column ) - sample_input = _ReadModifyWriteRowResponsePB(row=response_row) - self.assertEqual(expected_output, self._call_fut(sample_input)) + ) + if time_range is not None: + expected_pb.delete_from_column.time_range._pb.CopyFrom(time_range.to_pb()._pb) + assert row._pb_mutations == [expected_pb] + + +def test_direct_row_delete_cells_no_time_range(): + _delete_cells_helper() + + +def test_direct_row_delete_cells_with_time_range(): + import datetime + from google.cloud._helpers import _EPOCH + from google.cloud.bigtable.row_filters import TimestampRange + microseconds = 30871000 # Makes sure already milliseconds granularity + start = _EPOCH + datetime.timedelta(microseconds=microseconds) + time_range = TimestampRange(start=start) + _delete_cells_helper(time_range=time_range) -class Test__parse_family_pb(unittest.TestCase): - def _call_fut(self, family_pb): - from google.cloud.bigtable.row import _parse_family_pb - return _parse_family_pb(family_pb) +def test_direct_row_delete_cells_with_bad_column(): + # This makes sure a failure on one of the columns doesn't leave + # the row's mutations in a bad state. + row_key = b"row_key" + column = b"column" + column_family_id = u"column_family_id" + table = object() - def test_it(self): - from google.cloud._helpers import _datetime_from_microseconds + row = _make_direct_row(row_key, table) + columns = [column, object()] + assert row._pb_mutations == [] + with pytest.raises(TypeError): + row.delete_cells(column_family_id, columns) + assert row._pb_mutations == [] + + +def test_direct_row_delete_cells_with_string_columns(): + row_key = b"row_key" + column_family_id = u"column_family_id" + column1 = u"column1" + column1_bytes = b"column1" + column2 = u"column2" + column2_bytes = b"column2" + table = object() + + row = _make_direct_row(row_key, table) + columns = [column1, column2] + assert row._pb_mutations == [] + row.delete_cells(column_family_id, columns) + + expected_pb1 = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + family_name=column_family_id, column_qualifier=column1_bytes + ) + ) + expected_pb2 = _MutationPB( + delete_from_column=_MutationDeleteFromColumnPB( + family_name=column_family_id, column_qualifier=column2_bytes + ) + ) + assert row._pb_mutations == [expected_pb1, expected_pb2] + + +def test_direct_row_commit(): + project_id = "project-id" + row_key = b"row_key" + table_name = "projects/more-stuff" + column_family_id = u"column_family_id" + column = b"column" - col_fam1 = u"col-fam-id" - col_name1 = b"col-name1" - col_name2 = b"col-name2" - cell_val1 = b"cell-val" - cell_val2 = b"cell-val-newer" - cell_val3 = b"altcol-cell-val" + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client) + row = _make_direct_row(row_key, table) + value = b"bytes-value" + + # Perform the method and check the result. 
+ row.set_cell(column_family_id, column, value) + row.commit() + assert table.mutated_rows == [row] + + +def test_direct_row_commit_with_exception(): + from google.rpc import status_pb2 + + project_id = "project-id" + row_key = b"row_key" + table_name = "projects/more-stuff" + column_family_id = u"column_family_id" + column = b"column" + + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client) + row = _make_direct_row(row_key, table) + value = b"bytes-value" + + # Perform the method and check the result. + row.set_cell(column_family_id, column, value) + result = row.commit() + expected = status_pb2.Status(code=0) + assert result == expected + + +def _make_conditional_row(*args, **kwargs): + from google.cloud.bigtable.row import ConditionalRow + + return ConditionalRow(*args, **kwargs) + + +def test_conditional_row_constructor(): + row_key = b"row_key" + table = object() + filter_ = object() + + row = _make_conditional_row(row_key, table, filter_=filter_) + assert row._row_key == row_key + assert row._table is table + assert row._filter is filter_ + assert row._true_pb_mutations == [] + assert row._false_pb_mutations == [] + + +def test_conditional_row__get_mutations(): + row_key = b"row_key" + filter_ = object() + row = _make_conditional_row(row_key, None, filter_=filter_) + + row._true_pb_mutations = true_mutations = object() + row._false_pb_mutations = false_mutations = object() + assert true_mutations is row._get_mutations(True) + assert false_mutations is row._get_mutations(False) + assert false_mutations is row._get_mutations(None) + + +def test_conditional_row_commit(): + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + project_id = "project-id" + row_key = b"row_key" + table_name = "projects/more-stuff" + app_profile_id = "app_profile_id" + column_family_id1 = u"column_family_id1" + column_family_id2 = u"column_family_id2" + column_family_id3 = u"column_family_id3" + column1 = b"column1" + column2 = b"column2" + + api = mock.create_autospec(BigtableClient) + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client, app_profile_id=app_profile_id) + row_filter = RowSampleFilter(0.33) + row = _make_conditional_row(row_key, table, filter_=row_filter) + + # Create request_pb + value1 = b"bytes-value" + + # Create response_pb + predicate_matched = True + response_pb = _CheckAndMutateRowResponsePB(predicate_matched=predicate_matched) + + # Patch the stub used by the API method. + api.check_and_mutate_row.side_effect = [response_pb] + client._table_data_client = api + + # Create expected_result. + expected_result = predicate_matched + + # Perform the method and check the result. 
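+    # 'state=True' stages each mutation on the row's "true" branch, applied
+    # when the row filter matches; 'state=False' stages it on the "false"
+    # branch of the CheckAndMutateRow request that 'commit' sends.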
+ row.set_cell(column_family_id1, column1, value1, state=True) + row.delete(state=False) + row.delete_cell(column_family_id2, column2, state=True) + row.delete_cells(column_family_id3, row.ALL_COLUMNS, state=True) + result = row.commit() + call_args = api.check_and_mutate_row.call_args + assert app_profile_id == call_args.app_profile_id[0] + assert result == expected_result + assert row._true_pb_mutations == [] + assert row._false_pb_mutations == [] + + +def test_conditional_row_commit_too_many_mutations(): + from google.cloud._testing import _Monkey + from google.cloud.bigtable import row as MUT + + row_key = b"row_key" + table = object() + filter_ = object() + row = _make_conditional_row(row_key, table, filter_=filter_) + row._true_pb_mutations = [1, 2, 3] + num_mutations = len(row._true_pb_mutations) + with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): + with pytest.raises(ValueError): + row.commit() + + +def test_conditional_row_commit_no_mutations(): + from tests.unit._testing import _FakeStub + + project_id = "project-id" + row_key = b"row_key" + + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(None, client=client) + filter_ = object() + row = _make_conditional_row(row_key, table, filter_=filter_) + assert row._true_pb_mutations == [] + assert row._false_pb_mutations == [] + + # Patch the stub used by the API method. + stub = _FakeStub() + + # Perform the method and check the result. + result = row.commit() + assert result is None + # Make sure no request was sent. + assert stub.method_calls == [] + + +def _make_append_row(*args, **kwargs): + from google.cloud.bigtable.row import AppendRow + + return AppendRow(*args, **kwargs) + + +def test_append_row_constructor(): + row_key = b"row_key" + table = object() + + row = _make_append_row(row_key, table) + assert row._row_key == row_key + assert row._table is table + assert row._rule_pb_list == [] + + +def test_append_row_clear(): + row_key = b"row_key" + table = object() + row = _make_append_row(row_key, table) + row._rule_pb_list = [1, 2, 3] + row.clear() + assert row._rule_pb_list == [] + + +def test_append_row_append_cell_value(): + table = object() + row_key = b"row_key" + row = _make_append_row(row_key, table) + assert row._rule_pb_list == [] + + column = b"column" + column_family_id = u"column_family_id" + value = b"bytes-val" + row.append_cell_value(column_family_id, column, value) + expected_pb = _ReadModifyWriteRulePB( + family_name=column_family_id, column_qualifier=column, append_value=value + ) + assert row._rule_pb_list == [expected_pb] + + +def test_append_row_increment_cell_value(): + table = object() + row_key = b"row_key" + row = _make_append_row(row_key, table) + assert row._rule_pb_list == [] + + column = b"column" + column_family_id = u"column_family_id" + int_value = 281330 + row.increment_cell_value(column_family_id, column, int_value) + expected_pb = _ReadModifyWriteRulePB( + family_name=column_family_id, + column_qualifier=column, + increment_amount=int_value, + ) + assert row._rule_pb_list == [expected_pb] + + +def test_append_row_commit(): + from google.cloud._testing import _Monkey + from google.cloud.bigtable import row as MUT + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + project_id = "project-id" + row_key = b"row_key" + table_name = "projects/more-stuff" + app_profile_id = "app_profile_id" + column_family_id = u"column_family_id" + column = b"column" + + api = 
mock.create_autospec(BigtableClient) - microseconds = 5554441037 - timestamp = _datetime_from_microseconds(microseconds) - expected_dict = { + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(table_name, client=client, app_profile_id=app_profile_id) + row = _make_append_row(row_key, table) + + # Create request_pb + value = b"bytes-value" + + # Create expected_result. + row_responses = [] + expected_result = object() + + # Patch API calls + client._table_data_client = api + + def mock_parse_rmw_row_response(row_response): + row_responses.append(row_response) + return expected_result + + # Perform the method and check the result. + with _Monkey(MUT, _parse_rmw_row_response=mock_parse_rmw_row_response): + row._table._instance._client._table_data_client = api + row.append_cell_value(column_family_id, column, value) + result = row.commit() + call_args = api.read_modify_write_row.call_args_list[0] + assert app_profile_id == call_args.app_profile_id[0] + assert result == expected_result + assert row._rule_pb_list == [] + + +def test_append_row_commit_no_rules(): + from tests.unit._testing import _FakeStub + + project_id = "project-id" + row_key = b"row_key" + + credentials = _make_credentials() + client = _make_client(project=project_id, credentials=credentials, admin=True) + table = _Table(None, client=client) + row = _make_append_row(row_key, table) + assert row._rule_pb_list == [] + + # Patch the stub used by the API method. + stub = _FakeStub() + + # Perform the method and check the result. + result = row.commit() + assert result == {} + # Make sure no request was sent. + assert stub.method_calls == [] + + +def test_append_row_commit_too_many_mutations(): + from google.cloud._testing import _Monkey + from google.cloud.bigtable import row as MUT + + row_key = b"row_key" + table = object() + row = _make_append_row(row_key, table) + row._rule_pb_list = [1, 2, 3] + num_mutations = len(row._rule_pb_list) + with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): + with pytest.raises(ValueError): + row.commit() + + +def test__parse_rmw_row_response(): + from google.cloud._helpers import _datetime_from_microseconds + from google.cloud.bigtable.row import _parse_rmw_row_response + + col_fam1 = u"col-fam-id" + col_fam2 = u"col-fam-id2" + col_name1 = b"col-name1" + col_name2 = b"col-name2" + col_name3 = b"col-name3-but-other-fam" + cell_val1 = b"cell-val" + cell_val2 = b"cell-val-newer" + cell_val3 = b"altcol-cell-val" + cell_val4 = b"foo" + + microseconds = 1000871 + timestamp = _datetime_from_microseconds(microseconds) + expected_output = { + col_fam1: { col_name1: [(cell_val1, timestamp), (cell_val2, timestamp)], col_name2: [(cell_val3, timestamp)], - } - expected_output = (col_fam1, expected_dict) - sample_input = _FamilyPB( - name=col_fam1, - columns=[ - _ColumnPB( - qualifier=col_name1, - cells=[ - _CellPB(value=cell_val1, timestamp_micros=microseconds), - _CellPB(value=cell_val2, timestamp_micros=microseconds), - ], - ), - _ColumnPB( - qualifier=col_name2, - cells=[_CellPB(value=cell_val3, timestamp_micros=microseconds)], - ), - ], - ) - self.assertEqual(expected_output, self._call_fut(sample_input)) + }, + col_fam2: {col_name3: [(cell_val4, timestamp)]}, + } + response_row = _RowPB( + families=[ + _FamilyPB( + name=col_fam1, + columns=[ + _ColumnPB( + qualifier=col_name1, + cells=[ + _CellPB(value=cell_val1, timestamp_micros=microseconds), + _CellPB(value=cell_val2, timestamp_micros=microseconds), + ], + ), + 
_ColumnPB( + qualifier=col_name2, + cells=[_CellPB(value=cell_val3, timestamp_micros=microseconds)], + ), + ], + ), + _FamilyPB( + name=col_fam2, + columns=[ + _ColumnPB( + qualifier=col_name3, + cells=[_CellPB(value=cell_val4, timestamp_micros=microseconds)], + ) + ], + ), + ] + ) + sample_input = _ReadModifyWriteRowResponsePB(row=response_row) + assert expected_output == _parse_rmw_row_response(sample_input) + + +def test__parse_family_pb(): + from google.cloud._helpers import _datetime_from_microseconds + from google.cloud.bigtable.row import _parse_family_pb + + col_fam1 = u"col-fam-id" + col_name1 = b"col-name1" + col_name2 = b"col-name2" + cell_val1 = b"cell-val" + cell_val2 = b"cell-val-newer" + cell_val3 = b"altcol-cell-val" + + microseconds = 5554441037 + timestamp = _datetime_from_microseconds(microseconds) + expected_dict = { + col_name1: [(cell_val1, timestamp), (cell_val2, timestamp)], + col_name2: [(cell_val3, timestamp)], + } + expected_output = (col_fam1, expected_dict) + sample_input = _FamilyPB( + name=col_fam1, + columns=[ + _ColumnPB( + qualifier=col_name1, + cells=[ + _CellPB(value=cell_val1, timestamp_micros=microseconds), + _CellPB(value=cell_val2, timestamp_micros=microseconds), + ], + ), + _ColumnPB( + qualifier=col_name2, + cells=[_CellPB(value=cell_val3, timestamp_micros=microseconds)], + ), + ], + ) + assert expected_output == _parse_family_pb(sample_input) def _CheckAndMutateRowResponsePB(*args, **kw): From 6b17ec21bead2f09e69a9863117e16f7da6e9bc3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 20 Oct 2021 18:54:53 -0400 Subject: [PATCH 12/16] tests: refactor 'row_data' unit tests w/ pytest idioms --- tests/unit/test_row_data.py | 2143 ++++++++++++++++++----------------- 1 file changed, 1114 insertions(+), 1029 deletions(-) diff --git a/tests/unit/test_row_data.py b/tests/unit/test_row_data.py index a95cf2ec4..4a2120fb6 100644 --- a/tests/unit/test_row_data.py +++ b/tests/unit/test_row_data.py @@ -13,1219 +13,1295 @@ # limitations under the License. 
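+# Shared constants (ROW_KEY, FAMILY_NAME, ...) and '_make_*' factory helpers
+# used by these tests are defined at module scope below.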
-import unittest +import os + import mock +import pytest -from google.api_core.exceptions import DeadlineExceeded from ._testing import _make_credentials -from google.cloud.bigtable.row_set import RowRange -from google.cloud.bigtable_v2.types import data as data_v2_pb2 - - -class TestCell(unittest.TestCase): - timestamp_micros = 18738724000 # Make sure millis granularity - - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_data import Cell - - return Cell - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def _from_pb_test_helper(self, labels=None): - import datetime - from google.cloud._helpers import _EPOCH - from google.cloud.bigtable_v2.types import data as data_v2_pb2 - - timestamp_micros = TestCell.timestamp_micros - timestamp = _EPOCH + datetime.timedelta(microseconds=timestamp_micros) - value = b"value-bytes" - - if labels is None: - cell_pb = data_v2_pb2.Cell(value=value, timestamp_micros=timestamp_micros) - cell_expected = self._make_one(value, timestamp_micros) - else: - cell_pb = data_v2_pb2.Cell( - value=value, timestamp_micros=timestamp_micros, labels=labels - ) - cell_expected = self._make_one(value, timestamp_micros, labels=labels) - - klass = self._get_target_class() - result = klass.from_pb(cell_pb) - self.assertEqual(result, cell_expected) - self.assertEqual(result.timestamp, timestamp) - - def test_from_pb(self): - self._from_pb_test_helper() - - def test_from_pb_with_labels(self): - labels = [u"label1", u"label2"] - self._from_pb_test_helper(labels) - - def test_constructor(self): - value = object() - cell = self._make_one(value, TestCell.timestamp_micros) - self.assertEqual(cell.value, value) - - def test___eq__(self): - value = object() - cell1 = self._make_one(value, TestCell.timestamp_micros) - cell2 = self._make_one(value, TestCell.timestamp_micros) - self.assertEqual(cell1, cell2) - - def test___eq__type_differ(self): - cell1 = self._make_one(None, None) - cell2 = object() - self.assertNotEqual(cell1, cell2) - - def test___ne__same_value(self): - value = object() - cell1 = self._make_one(value, TestCell.timestamp_micros) - cell2 = self._make_one(value, TestCell.timestamp_micros) - comparison_val = cell1 != cell2 - self.assertFalse(comparison_val) - - def test___ne__(self): - value1 = "value1" - value2 = "value2" - cell1 = self._make_one(value1, TestCell.timestamp_micros) - cell2 = self._make_one(value2, TestCell.timestamp_micros) - self.assertNotEqual(cell1, cell2) - - -class TestPartialRowData(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_data import PartialRowData - - return PartialRowData - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - row_key = object() - partial_row_data = self._make_one(row_key) - self.assertIs(partial_row_data._row_key, row_key) - self.assertEqual(partial_row_data._cells, {}) - - def test___eq__(self): - row_key = object() - partial_row_data1 = self._make_one(row_key) - partial_row_data2 = self._make_one(row_key) - self.assertEqual(partial_row_data1, partial_row_data2) - - def test___eq__type_differ(self): - partial_row_data1 = self._make_one(None) - partial_row_data2 = object() - self.assertNotEqual(partial_row_data1, partial_row_data2) - - def test___ne__same_value(self): - row_key = object() - partial_row_data1 = self._make_one(row_key) - partial_row_data2 = self._make_one(row_key) - comparison_val = partial_row_data1 != partial_row_data2 - 
self.assertFalse(comparison_val)
-
-    def test___ne__(self):
-        row_key1 = object()
-        partial_row_data1 = self._make_one(row_key1)
-        row_key2 = object()
-        partial_row_data2 = self._make_one(row_key2)
-        self.assertNotEqual(partial_row_data1, partial_row_data2)
-
-    def test___ne__cells(self):
-        row_key = object()
-        partial_row_data1 = self._make_one(row_key)
-        partial_row_data1._cells = object()
-        partial_row_data2 = self._make_one(row_key)
-        self.assertNotEqual(partial_row_data1, partial_row_data2)
-
-    def test_to_dict(self):
-        cell1 = object()
-        cell2 = object()
-        cell3 = object()
-
-        family_name1 = u"name1"
-        family_name2 = u"name2"
-        qual1 = b"col1"
-        qual2 = b"col2"
-        qual3 = b"col3"
-
-        partial_row_data = self._make_one(None)
-        partial_row_data._cells = {
-            family_name1: {qual1: cell1, qual2: cell2},
-            family_name2: {qual3: cell3},
-        }
-
-        result = partial_row_data.to_dict()
-        expected_result = {
-            b"name1:col1": cell1,
-            b"name1:col2": cell2,
-            b"name2:col3": cell3,
-        }
-        self.assertEqual(result, expected_result)
-
-    def test_cell_value(self):
-        family_name = u"name1"
-        qualifier = b"col1"
-        cell = _make_cell(b"value-bytes")
-
-        partial_row_data = self._make_one(None)
-        partial_row_data._cells = {family_name: {qualifier: [cell]}}
-
-        result = partial_row_data.cell_value(family_name, qualifier)
-        self.assertEqual(result, cell.value)
-
-    def test_cell_value_invalid_index(self):
-        family_name = u"name1"
-        qualifier = b"col1"
-        cell = _make_cell(b"")
-
-        partial_row_data = self._make_one(None)
-        partial_row_data._cells = {family_name: {qualifier: [cell]}}
-
-        with self.assertRaises(IndexError):
-            partial_row_data.cell_value(family_name, qualifier, index=None)
-
-    def test_cell_value_invalid_column_family_key(self):
-        family_name = u"name1"
-        qualifier = b"col1"
-
-        partial_row_data = self._make_one(None)
-
-        with self.assertRaises(KeyError):
-            partial_row_data.cell_value(family_name, qualifier)
-
-    def test_cell_value_invalid_column_key(self):
-        family_name = u"name1"
-        qualifier = b"col1"
-
-        partial_row_data = self._make_one(None)
-        partial_row_data._cells = {family_name: {}}
-
-        with self.assertRaises(KeyError):
-            partial_row_data.cell_value(family_name, qualifier)
-
-    def test_cell_values(self):
-        family_name = u"name1"
-        qualifier = b"col1"
-        cell = _make_cell(b"value-bytes")
-
-        partial_row_data = self._make_one(None)
-        partial_row_data._cells = {family_name: {qualifier: [cell]}}
-
-        values = []
-        for value, timestamp_micros in partial_row_data.cell_values(
-            family_name, qualifier
-        ):
-            values.append(value)
-
-        self.assertEqual(values[0], cell.value)
-
-    def test_cell_values_with_max_count(self):
-        family_name = u"name1"
-        qualifier = b"col1"
-        cell_1 = _make_cell(b"value-bytes-1")
-        cell_2 = _make_cell(b"value-bytes-2")
-
-        partial_row_data = self._make_one(None)
-        partial_row_data._cells = {family_name: {qualifier: [cell_1, cell_2]}}
-
-        values = []
-        for value, timestamp_micros in partial_row_data.cell_values(
-            family_name, qualifier, max_count=1
-        ):
-            values.append(value)
-
-        self.assertEqual(1, len(values))
-        self.assertEqual(values[0], cell_1.value)

-    def test_cells_property(self):
-        partial_row_data = self._make_one(None)
-        cells = {1: 2}
-        partial_row_data._cells = cells
-        self.assertEqual(partial_row_data.cells, cells)
+# Shared values used to build chunks and cells throughout these tests.
+TIMESTAMP_MICROS = 18738724000  # divisible by 1000, i.e. millisecond granularity
+ROW_KEY = b"row-key"
+FAMILY_NAME = u"family"
+QUALIFIER = b"qualifier"
+VALUE = b"value"
+TABLE_NAME = "table_name"

-    def test_row_key_getter(self):
-        row_key = 
object() - partial_row_data = self._make_one(row_key) - self.assertIs(partial_row_data.row_key, row_key) +def _make_cell(*args, **kwargs): + from google.cloud.bigtable.row_data import Cell -class _Client(object): + return Cell(*args, **kwargs) - data_stub = None +def _cell_from_pb_test_helper(labels=None): + import datetime + from google.cloud._helpers import _EPOCH + from google.cloud.bigtable_v2.types import data as data_v2_pb2 + from google.cloud.bigtable.row_data import Cell -class Test_retry_read_rows_exception(unittest.TestCase): - @staticmethod - def _call_fut(exc): - from google.cloud.bigtable.row_data import _retry_read_rows_exception + timestamp = _EPOCH + datetime.timedelta(microseconds=TIMESTAMP_MICROS) + value = b"value-bytes" - return _retry_read_rows_exception(exc) + if labels is None: + cell_pb = data_v2_pb2.Cell(value=value, timestamp_micros=TIMESTAMP_MICROS) + cell_expected = _make_cell(value, TIMESTAMP_MICROS) + else: + cell_pb = data_v2_pb2.Cell( + value=value, timestamp_micros=TIMESTAMP_MICROS, labels=labels + ) + cell_expected = _make_cell(value, TIMESTAMP_MICROS, labels=labels) - @staticmethod - def _make_grpc_call_error(exception): - from grpc import Call - from grpc import RpcError + result = Cell.from_pb(cell_pb) - class TestingException(Call, RpcError): - def __init__(self, exception): - self.exception = exception + assert result == cell_expected + assert result.timestamp == timestamp - def code(self): - return self.exception.grpc_status_code - def details(self): - return "Testing" +def test_cell_from_pb(): + _cell_from_pb_test_helper() - return TestingException(exception) - def test_w_miss(self): - from google.api_core.exceptions import Conflict +def test_cell_from_pb_with_labels(): + labels = [u"label1", u"label2"] + _cell_from_pb_test_helper(labels) - exception = Conflict("testing") - self.assertFalse(self._call_fut(exception)) - def test_w_service_unavailable(self): - from google.api_core.exceptions import ServiceUnavailable +def test_cell_constructor(): + value = object() + cell = _make_cell(value, TIMESTAMP_MICROS) + assert cell.value == value - exception = ServiceUnavailable("testing") - self.assertTrue(self._call_fut(exception)) - def test_w_deadline_exceeded(self): - from google.api_core.exceptions import DeadlineExceeded +def test_cell___eq__(): + value = object() + cell1 = _make_cell(value, TIMESTAMP_MICROS) + cell2 = _make_cell(value, TIMESTAMP_MICROS) + assert cell1 == cell2 - exception = DeadlineExceeded("testing") - self.assertTrue(self._call_fut(exception)) - def test_w_miss_wrapped_in_grpc(self): - from google.api_core.exceptions import Conflict +def test_cell___eq__type_differ(): + cell1 = _make_cell(None, None) + cell2 = object() + assert not (cell1 == cell2) - wrapped = Conflict("testing") - exception = self._make_grpc_call_error(wrapped) - self.assertFalse(self._call_fut(exception)) - def test_w_service_unavailable_wrapped_in_grpc(self): - from google.api_core.exceptions import ServiceUnavailable +def test_cell___ne__same_value(): + value = object() + cell1 = _make_cell(value, TIMESTAMP_MICROS) + cell2 = _make_cell(value, TIMESTAMP_MICROS) + assert not (cell1 != cell2) - wrapped = ServiceUnavailable("testing") - exception = self._make_grpc_call_error(wrapped) - self.assertTrue(self._call_fut(exception)) - def test_w_deadline_exceeded_wrapped_in_grpc(self): - from google.api_core.exceptions import DeadlineExceeded +def test_cell___ne__(): + value1 = "value1" + value2 = "value2" + cell1 = _make_cell(value1, TIMESTAMP_MICROS) + cell2 = 
_make_cell(value2, TIMESTAMP_MICROS) + assert cell1 != cell2 + + +def _make_partial_row_data(*args, **kwargs): + from google.cloud.bigtable.row_data import PartialRowData + + return PartialRowData(*args, **kwargs) + + +def test_partial_row_data_constructor(): + row_key = object() + partial_row_data = _make_partial_row_data(row_key) + assert partial_row_data._row_key is row_key + assert partial_row_data._cells == {} + + +def test_partial_row_data___eq__(): + row_key = object() + partial_row_data1 = _make_partial_row_data(row_key) + partial_row_data2 = _make_partial_row_data(row_key) + assert partial_row_data1 == partial_row_data2 + + +def test_partial_row_data___eq__type_differ(): + partial_row_data1 = _make_partial_row_data(None) + partial_row_data2 = object() + assert not (partial_row_data1 == partial_row_data2) + + +def test_partial_row_data___ne__same_value(): + row_key = object() + partial_row_data1 = _make_partial_row_data(row_key) + partial_row_data2 = _make_partial_row_data(row_key) + assert not (partial_row_data1 != partial_row_data2) + + +def test_partial_row_data___ne__(): + row_key1 = object() + partial_row_data1 = _make_partial_row_data(row_key1) + row_key2 = object() + partial_row_data2 = _make_partial_row_data(row_key2) + assert partial_row_data1 != partial_row_data2 + + +def test_partial_row_data___ne__cells(): + row_key = object() + partial_row_data1 = _make_partial_row_data(row_key) + partial_row_data1._cells = object() + partial_row_data2 = _make_partial_row_data(row_key) + assert partial_row_data1 != partial_row_data2 + + +def test_partial_row_data_to_dict(): + cell1 = object() + cell2 = object() + cell3 = object() + + family_name1 = u"name1" + family_name2 = u"name2" + qual1 = b"col1" + qual2 = b"col2" + qual3 = b"col3" + + partial_row_data = _make_partial_row_data(None) + partial_row_data._cells = { + family_name1: {qual1: cell1, qual2: cell2}, + family_name2: {qual3: cell3}, + } + + result = partial_row_data.to_dict() + expected_result = { + b"name1:col1": cell1, + b"name1:col2": cell2, + b"name2:col3": cell3, + } + assert result == expected_result + + +def test_partial_row_data_cell_value(): + family_name = u"name1" + qualifier = b"col1" + cell = _make_cell_pb(b"value-bytes") + + partial_row_data = _make_partial_row_data(None) + partial_row_data._cells = {family_name: {qualifier: [cell]}} + + result = partial_row_data.cell_value(family_name, qualifier) + assert result == cell.value + + +def test_partial_row_data_cell_value_invalid_index(): + family_name = u"name1" + qualifier = b"col1" + cell = _make_cell_pb(b"") + + partial_row_data = _make_partial_row_data(None) + partial_row_data._cells = {family_name: {qualifier: [cell]}} + + with pytest.raises(IndexError): + partial_row_data.cell_value(family_name, qualifier, index=None) + + +def test_partial_row_data_cell_value_invalid_column_family_key(): + family_name = u"name1" + qualifier = b"col1" + + partial_row_data = _make_partial_row_data(None) + + with pytest.raises(KeyError): + partial_row_data.cell_value(family_name, qualifier) + + +def test_partial_row_data_cell_value_invalid_column_key(): + family_name = u"name1" + qualifier = b"col1" + + partial_row_data = _make_partial_row_data(None) + partial_row_data._cells = {family_name: {}} + + with pytest.raises(KeyError): + partial_row_data.cell_value(family_name, qualifier) + + +def test_partial_row_data_cell_values(): + family_name = u"name1" + qualifier = b"col1" + cell = _make_cell_pb(b"value-bytes") + + partial_row_data = _make_partial_row_data(None) + 
partial_row_data._cells = {family_name: {qualifier: [cell]}} + + values = [] + for value, timestamp_micros in partial_row_data.cell_values(family_name, qualifier): + values.append(value) + + assert values[0] == cell.value + + +def test_partial_row_data_cell_values_with_max_count(): + family_name = u"name1" + qualifier = b"col1" + cell_1 = _make_cell_pb(b"value-bytes-1") + cell_2 = _make_cell_pb(b"value-bytes-2") + + partial_row_data = _make_partial_row_data(None) + partial_row_data._cells = {family_name: {qualifier: [cell_1, cell_2]}} + + values = [] + for value, timestamp_micros in partial_row_data.cell_values( + family_name, qualifier, max_count=1 + ): + values.append(value) + + assert 1 == len(values) + assert values[0] == cell_1.value + + +def test_partial_row_data_cells_property(): + partial_row_data = _make_partial_row_data(None) + cells = {1: 2} + partial_row_data._cells = cells + assert partial_row_data.cells == cells + + +def test_partial_row_data_row_key_getter(): + row_key = object() + partial_row_data = _make_partial_row_data(row_key) + assert partial_row_data.row_key is row_key + + +def _make_grpc_call_error(exception): + from grpc import Call + from grpc import RpcError + + class TestingException(Call, RpcError): + def __init__(self, exception): + self.exception = exception + + def code(self): + return self.exception.grpc_status_code + + def details(self): + return "Testing" + + return TestingException(exception) + + +def test__retry_read_rows_exception_miss(): + from google.api_core.exceptions import Conflict + from google.cloud.bigtable.row_data import _retry_read_rows_exception + + exception = Conflict("testing") + assert not _retry_read_rows_exception(exception) + + +def test__retry_read_rows_exception_service_unavailable(): + from google.api_core.exceptions import ServiceUnavailable + from google.cloud.bigtable.row_data import _retry_read_rows_exception + + exception = ServiceUnavailable("testing") + assert _retry_read_rows_exception(exception) + + +def test__retry_read_rows_exception_deadline_exceeded(): + from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.row_data import _retry_read_rows_exception + + exception = DeadlineExceeded("testing") + assert _retry_read_rows_exception(exception) + + +def test__retry_read_rows_exception_miss_wrapped_in_grpc(): + from google.api_core.exceptions import Conflict + from google.cloud.bigtable.row_data import _retry_read_rows_exception + + wrapped = Conflict("testing") + exception = _make_grpc_call_error(wrapped) + assert not _retry_read_rows_exception(exception) + + +def test__retry_read_rows_exception_service_unavailable_wrapped_in_grpc(): + from google.api_core.exceptions import ServiceUnavailable + from google.cloud.bigtable.row_data import _retry_read_rows_exception + + wrapped = ServiceUnavailable("testing") + exception = _make_grpc_call_error(wrapped) + assert _retry_read_rows_exception(exception) + + +def test__retry_read_rows_exception_deadline_exceeded_wrapped_in_grpc(): + from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.row_data import _retry_read_rows_exception + + wrapped = DeadlineExceeded("testing") + exception = _make_grpc_call_error(wrapped) + assert _retry_read_rows_exception(exception) + + +def _make_partial_rows_data(*args, **kwargs): + from google.cloud.bigtable.row_data import PartialRowsData + + return PartialRowsData(*args, **kwargs) + + +def _partial_rows_data_consume_all(yrd): + return [row.row_key for row in yrd] + + +def 
_make_client(*args, **kwargs): + from google.cloud.bigtable.client import Client + + return Client(*args, **kwargs) + + +def test_partial_rows_data_constructor(): + from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + partial_rows_data = _make_partial_rows_data(client._data_stub.ReadRows, request) + assert partial_rows_data.request is request + assert partial_rows_data.rows == {} + assert partial_rows_data.retry == DEFAULT_RETRY_READ_ROWS + + +def test_partial_rows_data_constructor_with_retry(): + from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + retry = DEFAULT_RETRY_READ_ROWS + partial_rows_data = _make_partial_rows_data( + client._data_stub.ReadRows, request, retry + ) + partial_rows_data.read_method.assert_called_once_with( + request, timeout=DEFAULT_RETRY_READ_ROWS.deadline + 1 + ) + assert partial_rows_data.request is request + assert partial_rows_data.rows == {} + assert partial_rows_data.retry == retry + + +def test_partial_rows_data___eq__(): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + partial_rows_data1 = _make_partial_rows_data(client._data_stub.ReadRows, request) + partial_rows_data2 = _make_partial_rows_data(client._data_stub.ReadRows, request) + assert partial_rows_data1.rows == partial_rows_data2.rows + + +def test_partial_rows_data___eq__type_differ(): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + partial_rows_data1 = _make_partial_rows_data(client._data_stub.ReadRows, request) + partial_rows_data2 = object() + assert not (partial_rows_data1 == partial_rows_data2) - wrapped = DeadlineExceeded("testing") - exception = self._make_grpc_call_error(wrapped) - self.assertTrue(self._call_fut(exception)) +def test_partial_rows_data___ne__same_value(): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + partial_rows_data1 = _make_partial_rows_data(client._data_stub.ReadRows, request) + partial_rows_data2 = _make_partial_rows_data(client._data_stub.ReadRows, request) + assert partial_rows_data1 != partial_rows_data2 -class TestPartialRowsData(unittest.TestCase): - ROW_KEY = b"row-key" - FAMILY_NAME = u"family" - QUALIFIER = b"qualifier" + +def test_partial_rows_data___ne__(): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + partial_rows_data1 = _make_partial_rows_data(client._data_stub.ReadRows, request) + partial_rows_data2 = _make_partial_rows_data(client._data_stub.ReadRows, request) + assert partial_rows_data1 != partial_rows_data2 + + +def test_partial_rows_data_rows_getter(): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + partial_rows_data = _make_partial_rows_data(client._data_stub.ReadRows, request) + partial_rows_data.rows = value = object() + assert partial_rows_data.rows is value + + +def test_partial_rows_data_state_start(): + client = _Client() + iterator = _MockCancellableIterator() + client._data_stub = mock.MagicMock() + client._data_stub.ReadRows.side_effect = [iterator] + request = object() + yrd = _make_partial_rows_data(client._data_stub.ReadRows, request) + assert yrd.state == yrd.NEW_ROW + + +def test_partial_rows_data_state_new_row_w_row(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + chunk = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + 
family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunks = [chunk] + + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + + data_api = mock.create_autospec(BigtableClient) + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + client._table_data_client = data_api + request = object() + + yrd = _make_partial_rows_data(client._table_data_client.read_rows, request) + assert yrd.retry._deadline == 60.0 + + yrd.response_iterator = iterator + rows = [row for row in yrd] + + result = rows[0] + assert result.row_key == ROW_KEY + assert yrd._counter == 1 + assert yrd.state == yrd.NEW_ROW + + +def test_partial_rows_data_multiple_chunks(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + chunk1 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=False, + ) + chunk2 = _ReadRowsResponseCellChunkPB( + qualifier=QUALIFIER + b"1", + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunks = [chunk1, chunk2] + + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + data_api = mock.create_autospec(BigtableClient) + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + client._table_data_client = data_api + request = object() + + yrd = _make_partial_rows_data(data_api.read_rows, request) + + yrd.response_iterator = iterator + rows = [row for row in yrd] + result = rows[0] + assert result.row_key == ROW_KEY + assert yrd._counter == 1 + assert yrd.state == yrd.NEW_ROW + + +def test_partial_rows_data_cancel(): + client = _Client() + response_iterator = _MockCancellableIterator() + client._data_stub = mock.MagicMock() + client._data_stub.ReadRows.side_effect = [response_iterator] + request = object() + yield_rows_data = _make_partial_rows_data(client._data_stub.ReadRows, request) + assert response_iterator.cancel_calls == 0 + yield_rows_data.cancel() + assert response_iterator.cancel_calls == 1 + assert list(yield_rows_data) == [] + + +def test_partial_rows_data_cancel_between_chunks(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + chunk1 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunk2 = _ReadRowsResponseCellChunkPB( + qualifier=QUALIFIER + b"1", + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunks = [chunk1, chunk2] + response = _ReadRowsResponseV2(chunks) + response_iterator = _MockCancellableIterator(response) + + client = _Client() + data_api = mock.create_autospec(BigtableClient) + client._table_data_client = data_api + request = object() + yrd = _make_partial_rows_data(data_api.read_rows, request) + yrd.response_iterator = response_iterator + + rows = [] + for row in yrd: + yrd.cancel() + rows.append(row) + + assert response_iterator.cancel_calls == 1 + assert list(yrd) == [] + + +# 'consume_next' tested via 'TestPartialRowsData_JSON_acceptance_tests' + + +def test_partial_rows_data__copy_from_previous_unset(): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + yrd = _make_partial_rows_data(client._data_stub.read_rows, request) + cell = 
_PartialCellData() + yrd._copy_from_previous(cell) + assert cell.row_key == b"" + assert cell.family_name == u"" + assert cell.qualifier is None + assert cell.timestamp_micros == 0 + assert cell.labels == [] + + +def test_partial_rows_data__copy_from_previous_blank(): + ROW_KEY = "RK" + FAMILY_NAME = u"A" + QUALIFIER = b"C" + TIMESTAMP_MICROS = 100 + LABELS = ["L1", "L2"] + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + yrd = _make_partial_rows_data(client._data_stub.ReadRows, request) + cell = _PartialCellData( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + labels=LABELS, + ) + yrd._previous_cell = _PartialCellData() + yrd._copy_from_previous(cell) + assert cell.row_key == ROW_KEY + assert cell.family_name == FAMILY_NAME + assert cell.qualifier == QUALIFIER + assert cell.timestamp_micros == TIMESTAMP_MICROS + assert cell.labels == LABELS + + +def test_partial_rows_data__copy_from_previous_filled(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + ROW_KEY = "RK" + FAMILY_NAME = u"A" + QUALIFIER = b"C" TIMESTAMP_MICROS = 100 - VALUE = b"value" + LABELS = ["L1", "L2"] + client = _Client() + data_api = mock.create_autospec(BigtableClient) + client._data_stub = data_api + request = object() + yrd = _make_partial_rows_data(client._data_stub.read_rows, request) + yrd._previous_cell = _PartialCellData( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + labels=LABELS, + ) + cell = _PartialCellData() + yrd._copy_from_previous(cell) + assert cell.row_key == ROW_KEY + assert cell.family_name == FAMILY_NAME + assert cell.qualifier == QUALIFIER + assert cell.timestamp_micros == 0 + assert cell.labels == [] + + +def test_partial_rows_data_valid_last_scanned_row_key_on_start(): + client = _Client() + response = _ReadRowsResponseV2(chunks=(), last_scanned_row_key="2.AFTER") + iterator = _MockCancellableIterator(response) + client._data_stub = mock.MagicMock() + client._data_stub.read_rows.side_effect = [iterator] + request = object() + yrd = _make_partial_rows_data(client._data_stub.read_rows, request) + yrd.last_scanned_row_key = "1.BEFORE" + _partial_rows_data_consume_all(yrd) + assert yrd.last_scanned_row_key == "2.AFTER" + + +def test_partial_rows_data_invalid_empty_chunk(): + from google.cloud.bigtable.row_data import InvalidChunk + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + client = _Client() + chunks = _generate_cell_chunks([""]) + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + client._data_stub = mock.create_autospec(BigtableClient) + client._data_stub.read_rows.side_effect = [iterator] + request = object() + yrd = _make_partial_rows_data(client._data_stub.read_rows, request) + with pytest.raises(InvalidChunk): + _partial_rows_data_consume_all(yrd) + + +def test_partial_rows_data_state_cell_in_progress(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + LABELS = ["L1", "L2"] + + request = object() + client = _Client() + client._data_stub = mock.create_autospec(BigtableClient) + yrd = _make_partial_rows_data(client._data_stub.read_rows, request) + + chunk = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + labels=LABELS, + ) + yrd._update_cell(chunk) + + more_cell_data = _ReadRowsResponseCellChunkPB(value=VALUE) + 
yrd._update_cell(more_cell_data) + + assert yrd._cell.row_key == ROW_KEY + assert yrd._cell.family_name == FAMILY_NAME + assert yrd._cell.qualifier == QUALIFIER + assert yrd._cell.timestamp_micros == TIMESTAMP_MICROS + assert yrd._cell.labels == LABELS + assert yrd._cell.value == VALUE + VALUE + + +def test_partial_rows_data_yield_rows_data(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient + + client = _Client() + + chunk = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunks = [chunk] + + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + data_api = mock.create_autospec(BigtableClient) + client._data_stub = data_api + client._data_stub.read_rows.side_effect = [iterator] + + request = object() - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_data import PartialRowsData + yrd = _make_partial_rows_data(client._data_stub.read_rows, request) - return PartialRowsData + result = _partial_rows_data_consume_all(yrd)[0] - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client + assert result == ROW_KEY - return Client - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) +def test_partial_rows_data_yield_retry_rows_data(): + from google.api_core import retry - def test_constructor(self): - from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + client = _Client() - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - partial_rows_data = self._make_one(client._data_stub.ReadRows, request) - self.assertIs(partial_rows_data.request, request) - self.assertEqual(partial_rows_data.rows, {}) - self.assertEqual(partial_rows_data.retry, DEFAULT_RETRY_READ_ROWS) + retry_read_rows = retry.Retry(predicate=_read_rows_retry_exception) - def test_constructor_with_retry(self): - from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + chunk = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunks = [chunk] - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - retry = DEFAULT_RETRY_READ_ROWS - partial_rows_data = self._make_one(client._data_stub.ReadRows, request, retry) - partial_rows_data.read_method.assert_called_once_with( - request, timeout=DEFAULT_RETRY_READ_ROWS.deadline + 1 - ) - self.assertIs(partial_rows_data.request, request) - self.assertEqual(partial_rows_data.rows, {}) - self.assertEqual(partial_rows_data.retry, retry) - - def test___eq__(self): - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - partial_rows_data1 = self._make_one(client._data_stub.ReadRows, request) - partial_rows_data2 = self._make_one(client._data_stub.ReadRows, request) - self.assertEqual(partial_rows_data1.rows, partial_rows_data2.rows) - - def test___eq__type_differ(self): - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - partial_rows_data1 = self._make_one(client._data_stub.ReadRows, request) - partial_rows_data2 = object() - self.assertNotEqual(partial_rows_data1, partial_rows_data2) - - def test___ne__same_value(self): - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - partial_rows_data1 = 
self._make_one(client._data_stub.ReadRows, request) - partial_rows_data2 = self._make_one(client._data_stub.ReadRows, request) - comparison_val = partial_rows_data1 != partial_rows_data2 - self.assertTrue(comparison_val) - - def test___ne__(self): - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - partial_rows_data1 = self._make_one(client._data_stub.ReadRows, request) - partial_rows_data2 = self._make_one(client._data_stub.ReadRows, request) - self.assertNotEqual(partial_rows_data1, partial_rows_data2) - - def test_rows_getter(self): - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - partial_rows_data = self._make_one(client._data_stub.ReadRows, request) - partial_rows_data.rows = value = object() - self.assertIs(partial_rows_data.rows, value) - - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - - def test_state_start(self): - client = _Client() - iterator = _MockCancellableIterator() - client._data_stub = mock.MagicMock() - client._data_stub.ReadRows.side_effect = [iterator] - request = object() - yrd = self._make_one(client._data_stub.ReadRows, request) - self.assertEqual(yrd.state, yrd.NEW_ROW) - - def test_state_new_row_w_row(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunks = [chunk] + response = _ReadRowsResponseV2(chunks) + failure_iterator = _MockFailureIterator_1() + iterator = _MockCancellableIterator(response) + client._data_stub = mock.MagicMock() + client._data_stub.ReadRows.side_effect = [failure_iterator, iterator] - response = _ReadRowsResponseV2(chunks) - iterator = _MockCancellableIterator(response) + request = object() - data_api = mock.create_autospec(BigtableClient) + yrd = _make_partial_rows_data(client._data_stub.ReadRows, request, retry_read_rows) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - request = object() - - yrd = self._make_one(client._table_data_client.read_rows, request) - self.assertEqual(yrd.retry._deadline, 60.0) - - yrd.response_iterator = iterator - rows = [row for row in yrd] - - result = rows[0] - self.assertEqual(result.row_key, self.ROW_KEY) - self.assertEqual(yrd._counter, 1) - self.assertEqual(yrd.state, yrd.NEW_ROW) - - def test_multiple_chunks(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - chunk1 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=False, - ) - chunk2 = _ReadRowsResponseCellChunkPB( - qualifier=self.QUALIFIER + b"1", - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunks = [chunk1, chunk2] - - response = _ReadRowsResponseV2(chunks) - iterator = _MockCancellableIterator(response) - data_api = mock.create_autospec(BigtableClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - request = object() - - yrd = self._make_one(data_api.read_rows, request) - - yrd.response_iterator = iterator - rows = [row for row in yrd] - 
result = rows[0] - self.assertEqual(result.row_key, self.ROW_KEY) - self.assertEqual(yrd._counter, 1) - self.assertEqual(yrd.state, yrd.NEW_ROW) - - def test_cancel(self): - client = _Client() - response_iterator = _MockCancellableIterator() - client._data_stub = mock.MagicMock() - client._data_stub.ReadRows.side_effect = [response_iterator] - request = object() - yield_rows_data = self._make_one(client._data_stub.ReadRows, request) - self.assertEqual(response_iterator.cancel_calls, 0) - yield_rows_data.cancel() - self.assertEqual(response_iterator.cancel_calls, 1) - self.assertEqual(list(yield_rows_data), []) - - def test_cancel_between_chunks(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - chunk1 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunk2 = _ReadRowsResponseCellChunkPB( - qualifier=self.QUALIFIER + b"1", - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunks = [chunk1, chunk2] - response = _ReadRowsResponseV2(chunks) - response_iterator = _MockCancellableIterator(response) - - client = _Client() - data_api = mock.create_autospec(BigtableClient) - client._table_data_client = data_api - request = object() - yrd = self._make_one(data_api.read_rows, request) - yrd.response_iterator = response_iterator - - rows = [] - for row in yrd: - yrd.cancel() - rows.append(row) - - self.assertEqual(response_iterator.cancel_calls, 1) - self.assertEqual(list(yrd), []) - - # 'consume_next' tested via 'TestPartialRowsData_JSON_acceptance_tests' - - def test__copy_from_previous_unset(self): - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - yrd = self._make_one(client._data_stub.read_rows, request) - cell = _PartialCellData() - yrd._copy_from_previous(cell) - self.assertEqual(cell.row_key, b"") - self.assertEqual(cell.family_name, u"") - self.assertIsNone(cell.qualifier) - self.assertEqual(cell.timestamp_micros, 0) - self.assertEqual(cell.labels, []) - - def test__copy_from_previous_blank(self): - ROW_KEY = "RK" - FAMILY_NAME = u"A" - QUALIFIER = b"C" - TIMESTAMP_MICROS = 100 - LABELS = ["L1", "L2"] - client = _Client() - client._data_stub = mock.MagicMock() - request = object() - yrd = self._make_one(client._data_stub.ReadRows, request) - cell = _PartialCellData( - row_key=ROW_KEY, - family_name=FAMILY_NAME, - qualifier=QUALIFIER, - timestamp_micros=TIMESTAMP_MICROS, - labels=LABELS, - ) - yrd._previous_cell = _PartialCellData() - yrd._copy_from_previous(cell) - self.assertEqual(cell.row_key, ROW_KEY) - self.assertEqual(cell.family_name, FAMILY_NAME) - self.assertEqual(cell.qualifier, QUALIFIER) - self.assertEqual(cell.timestamp_micros, TIMESTAMP_MICROS) - self.assertEqual(cell.labels, LABELS) - - def test__copy_from_previous_filled(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - ROW_KEY = "RK" - FAMILY_NAME = u"A" - QUALIFIER = b"C" - TIMESTAMP_MICROS = 100 - LABELS = ["L1", "L2"] - client = _Client() - data_api = mock.create_autospec(BigtableClient) - client._data_stub = data_api - request = object() - yrd = self._make_one(client._data_stub.read_rows, request) - yrd._previous_cell = _PartialCellData( - row_key=ROW_KEY, - family_name=FAMILY_NAME, - qualifier=QUALIFIER, - timestamp_micros=TIMESTAMP_MICROS, - labels=LABELS, - ) - cell = _PartialCellData() - yrd._copy_from_previous(cell) - 
self.assertEqual(cell.row_key, ROW_KEY) - self.assertEqual(cell.family_name, FAMILY_NAME) - self.assertEqual(cell.qualifier, QUALIFIER) - self.assertEqual(cell.timestamp_micros, 0) - self.assertEqual(cell.labels, []) - - def test_valid_last_scanned_row_key_on_start(self): - client = _Client() - response = _ReadRowsResponseV2(chunks=(), last_scanned_row_key="2.AFTER") - iterator = _MockCancellableIterator(response) - client._data_stub = mock.MagicMock() - client._data_stub.read_rows.side_effect = [iterator] - request = object() - yrd = self._make_one(client._data_stub.read_rows, request) - yrd.last_scanned_row_key = "1.BEFORE" - self._consume_all(yrd) - self.assertEqual(yrd.last_scanned_row_key, "2.AFTER") - - def test_invalid_empty_chunk(self): - from google.cloud.bigtable.row_data import InvalidChunk - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - client = _Client() - chunks = _generate_cell_chunks([""]) - response = _ReadRowsResponseV2(chunks) - iterator = _MockCancellableIterator(response) - client._data_stub = mock.create_autospec(BigtableClient) - client._data_stub.read_rows.side_effect = [iterator] - request = object() - yrd = self._make_one(client._data_stub.read_rows, request) - with self.assertRaises(InvalidChunk): - self._consume_all(yrd) - - def test_state_cell_in_progress(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - LABELS = ["L1", "L2"] - - request = object() - client = _Client() - client._data_stub = mock.create_autospec(BigtableClient) - yrd = self._make_one(client._data_stub.read_rows, request) - - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - labels=LABELS, - ) - yrd._update_cell(chunk) - - more_cell_data = _ReadRowsResponseCellChunkPB(value=self.VALUE) - yrd._update_cell(more_cell_data) - - self.assertEqual(yrd._cell.row_key, self.ROW_KEY) - self.assertEqual(yrd._cell.family_name, self.FAMILY_NAME) - self.assertEqual(yrd._cell.qualifier, self.QUALIFIER) - self.assertEqual(yrd._cell.timestamp_micros, self.TIMESTAMP_MICROS) - self.assertEqual(yrd._cell.labels, LABELS) - self.assertEqual(yrd._cell.value, self.VALUE + self.VALUE) - - def test_yield_rows_data(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - - client = _Client() - - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunks = [chunk] + result = _partial_rows_data_consume_all(yrd)[0] - response = _ReadRowsResponseV2(chunks) - iterator = _MockCancellableIterator(response) - data_api = mock.create_autospec(BigtableClient) - client._data_stub = data_api - client._data_stub.read_rows.side_effect = [iterator] + assert result == ROW_KEY - request = object() - yrd = self._make_one(client._data_stub.read_rows, request) +def _make_read_rows_request_manager(*args, **kwargs): + from google.cloud.bigtable.row_data import _ReadRowsRequestManager - result = self._consume_all(yrd)[0] + return _ReadRowsRequestManager(*args, **kwargs) - self.assertEqual(result, self.ROW_KEY) - def test_yield_retry_rows_data(self): - from google.api_core import retry +@pytest.fixture(scope="session") +def rrrm_data(): + from google.cloud.bigtable import row_set - client = _Client() + row_range1 = row_set.RowRange(b"row_key21", b"row_key29") + row_range2 = 
row_set.RowRange(b"row_key31", b"row_key39") + row_range3 = row_set.RowRange(b"row_key41", b"row_key49") - retry_read_rows = retry.Retry(predicate=_read_rows_retry_exception) + request = _ReadRowsRequestPB(table_name=TABLE_NAME) + request.rows.row_ranges.append(row_range1.get_range_kwargs()) + request.rows.row_ranges.append(row_range2.get_range_kwargs()) + request.rows.row_ranges.append(row_range3.get_range_kwargs()) - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunks = [chunk] + yield { + "row_range1": row_range1, + "row_range2": row_range2, + "row_range3": row_range3, + "request": request, + } - response = _ReadRowsResponseV2(chunks) - failure_iterator = _MockFailureIterator_1() - iterator = _MockCancellableIterator(response) - client._data_stub = mock.MagicMock() - client._data_stub.ReadRows.side_effect = [failure_iterator, iterator] - request = object() +def test_RRRM_constructor(): + request = mock.Mock() + last_scanned_key = "last_key" + rows_read_so_far = 10 - yrd = self._make_one(client._data_stub.ReadRows, request, retry_read_rows) + request_manager = _make_read_rows_request_manager( + request, last_scanned_key, rows_read_so_far + ) + assert request == request_manager.message + assert last_scanned_key == request_manager.last_scanned_key + assert rows_read_so_far == request_manager.rows_read_so_far - result = self._consume_all(yrd)[0] - self.assertEqual(result, self.ROW_KEY) +def test_RRRM__filter_row_key(): + table_name = "table_name" + request = _ReadRowsRequestPB(table_name=table_name) + request.rows.row_keys.extend([b"row_key1", b"row_key2", b"row_key3", b"row_key4"]) - def _consume_all(self, yrd): - return [row.row_key for row in yrd] + last_scanned_key = b"row_key2" + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) + row_keys = request_manager._filter_rows_keys() + expected_row_keys = [b"row_key3", b"row_key4"] + assert expected_row_keys == row_keys -class Test_ReadRowsRequestManager(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.table_name = "table_name" - cls.row_range1 = RowRange(b"row_key21", b"row_key29") - cls.row_range2 = RowRange(b"row_key31", b"row_key39") - cls.row_range3 = RowRange(b"row_key41", b"row_key49") - cls.request = _ReadRowsRequestPB(table_name=cls.table_name) - cls.request.rows.row_ranges.append(cls.row_range1.get_range_kwargs()) - cls.request.rows.row_ranges.append(cls.row_range2.get_range_kwargs()) - cls.request.rows.row_ranges.append(cls.row_range3.get_range_kwargs()) +def test_RRRM__filter_row_ranges_all_ranges_added_back(rrrm_data): + from google.cloud.bigtable_v2.types import data as data_v2_pb2 - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_data import _ReadRowsRequestManager + request = rrrm_data["request"] + last_scanned_key = b"row_key14" + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) + row_ranges = request_manager._filter_row_ranges() - return _ReadRowsRequestManager + exp_row_range1 = data_v2_pb2.RowRange( + start_key_closed=b"row_key21", end_key_open=b"row_key29" + ) + exp_row_range2 = data_v2_pb2.RowRange( + start_key_closed=b"row_key31", end_key_open=b"row_key39" + ) + exp_row_range3 = data_v2_pb2.RowRange( + start_key_closed=b"row_key41", end_key_open=b"row_key49" + ) + exp_row_ranges = [exp_row_range1, exp_row_range2, exp_row_range3] - def _make_one(self, *args, 
**kwargs): - return self._get_target_class()(*args, **kwargs) + assert exp_row_ranges == row_ranges - def test_constructor(self): - request = mock.Mock() - last_scanned_key = "last_key" - rows_read_so_far = 10 - request_manager = self._make_one(request, last_scanned_key, rows_read_so_far) - self.assertEqual(request, request_manager.message) - self.assertEqual(last_scanned_key, request_manager.last_scanned_key) - self.assertEqual(rows_read_so_far, request_manager.rows_read_so_far) +def test_RRRM__filter_row_ranges_all_ranges_already_read(rrrm_data): + request = rrrm_data["request"] + last_scanned_key = b"row_key54" + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) + row_ranges = request_manager._filter_row_ranges() - def test__filter_row_key(self): - table_name = "table_name" - request = _ReadRowsRequestPB(table_name=table_name) - request.rows.row_keys.extend( - [b"row_key1", b"row_key2", b"row_key3", b"row_key4"] - ) + assert row_ranges == [] - last_scanned_key = b"row_key2" - request_manager = self._make_one(request, last_scanned_key, 2) - row_keys = request_manager._filter_rows_keys() - expected_row_keys = [b"row_key3", b"row_key4"] - self.assertEqual(expected_row_keys, row_keys) +def test_RRRM__filter_row_ranges_all_ranges_already_read_open_closed(): + from google.cloud.bigtable import row_set - def test__filter_row_ranges_all_ranges_added_back(self): - last_scanned_key = b"row_key14" - request_manager = self._make_one(self.request, last_scanned_key, 2) - row_ranges = request_manager._filter_row_ranges() + last_scanned_key = b"row_key54" - exp_row_range1 = data_v2_pb2.RowRange( - start_key_closed=b"row_key21", end_key_open=b"row_key29" - ) - exp_row_range2 = data_v2_pb2.RowRange( - start_key_closed=b"row_key31", end_key_open=b"row_key39" - ) - exp_row_range3 = data_v2_pb2.RowRange( - start_key_closed=b"row_key41", end_key_open=b"row_key49" - ) - exp_row_ranges = [exp_row_range1, exp_row_range2, exp_row_range3] + row_range1 = row_set.RowRange(b"row_key21", b"row_key29", False, True) + row_range2 = row_set.RowRange(b"row_key31", b"row_key39") + row_range3 = row_set.RowRange(b"row_key41", b"row_key49", False, True) - self.assertEqual(exp_row_ranges, row_ranges) + request = _ReadRowsRequestPB(table_name=TABLE_NAME) + request.rows.row_ranges.append(row_range1.get_range_kwargs()) + request.rows.row_ranges.append(row_range2.get_range_kwargs()) + request.rows.row_ranges.append(row_range3.get_range_kwargs()) - def test__filter_row_ranges_all_ranges_already_read(self): - last_scanned_key = b"row_key54" - request_manager = self._make_one(self.request, last_scanned_key, 2) - row_ranges = request_manager._filter_row_ranges() + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) + request_manager.new_message = _ReadRowsRequestPB(table_name=TABLE_NAME) + row_ranges = request_manager._filter_row_ranges() - self.assertEqual(row_ranges, []) + assert row_ranges == [] - def test__filter_row_ranges_all_ranges_already_read_open_closed(self): - last_scanned_key = b"row_key54" - row_range1 = RowRange(b"row_key21", b"row_key29", False, True) - row_range2 = RowRange(b"row_key31", b"row_key39") - row_range3 = RowRange(b"row_key41", b"row_key49", False, True) +def test_RRRM__filter_row_ranges_some_ranges_already_read(rrrm_data): + from google.cloud.bigtable_v2.types import data as data_v2_pb2 - request = _ReadRowsRequestPB(table_name=self.table_name) - request.rows.row_ranges.append(row_range1.get_range_kwargs()) - 
request.rows.row_ranges.append(row_range2.get_range_kwargs()) - request.rows.row_ranges.append(row_range3.get_range_kwargs()) + request = rrrm_data["request"] + last_scanned_key = b"row_key22" + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) + request_manager.new_message = _ReadRowsRequestPB(table_name=TABLE_NAME) + row_ranges = request_manager._filter_row_ranges() - request_manager = self._make_one(request, last_scanned_key, 2) - request_manager.new_message = _ReadRowsRequestPB(table_name=self.table_name) - row_ranges = request_manager._filter_row_ranges() + exp_row_range1 = data_v2_pb2.RowRange( + start_key_open=b"row_key22", end_key_open=b"row_key29" + ) + exp_row_range2 = data_v2_pb2.RowRange( + start_key_closed=b"row_key31", end_key_open=b"row_key39" + ) + exp_row_range3 = data_v2_pb2.RowRange( + start_key_closed=b"row_key41", end_key_open=b"row_key49" + ) + exp_row_ranges = [exp_row_range1, exp_row_range2, exp_row_range3] - self.assertEqual(row_ranges, []) + assert exp_row_ranges == row_ranges - def test__filter_row_ranges_some_ranges_already_read(self): - last_scanned_key = b"row_key22" - request_manager = self._make_one(self.request, last_scanned_key, 2) - request_manager.new_message = _ReadRowsRequestPB(table_name=self.table_name) - row_ranges = request_manager._filter_row_ranges() - exp_row_range1 = data_v2_pb2.RowRange( - start_key_open=b"row_key22", end_key_open=b"row_key29" - ) - exp_row_range2 = data_v2_pb2.RowRange( - start_key_closed=b"row_key31", end_key_open=b"row_key39" - ) - exp_row_range3 = data_v2_pb2.RowRange( - start_key_closed=b"row_key41", end_key_open=b"row_key49" - ) - exp_row_ranges = [exp_row_range1, exp_row_range2, exp_row_range3] +def test_RRRM_build_updated_request(rrrm_data): + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable_v2 import types - self.assertEqual(exp_row_ranges, row_ranges) + row_range1 = rrrm_data["row_range1"] + row_filter = RowSampleFilter(0.33) + last_scanned_key = b"row_key25" + request = _ReadRowsRequestPB( + filter=row_filter.to_pb(), rows_limit=8, table_name=TABLE_NAME + ) + request.rows.row_ranges.append(row_range1.get_range_kwargs()) - def test_build_updated_request(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable_v2.types import RowRange + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) - row_filter = RowSampleFilter(0.33) - last_scanned_key = b"row_key25" - request = _ReadRowsRequestPB( - filter=row_filter.to_pb(), rows_limit=8, table_name=self.table_name - ) - request.rows.row_ranges.append(self.row_range1.get_range_kwargs()) + result = request_manager.build_updated_request() - request_manager = self._make_one(request, last_scanned_key, 2) + expected_result = _ReadRowsRequestPB( + table_name=TABLE_NAME, filter=row_filter.to_pb(), rows_limit=6 + ) - result = request_manager.build_updated_request() + row_range1 = types.RowRange( + start_key_open=last_scanned_key, end_key_open=row_range1.end_key + ) + expected_result.rows.row_ranges.append(row_range1) - expected_result = _ReadRowsRequestPB( - table_name=self.table_name, filter=row_filter.to_pb(), rows_limit=6 - ) + assert expected_result == result - row_range1 = RowRange( - start_key_open=last_scanned_key, end_key_open=self.row_range1.end_key - ) - expected_result.rows.row_ranges.append(row_range1) - self.assertEqual(expected_result, result) +def test_RRRM_build_updated_request_full_table(): + from google.cloud.bigtable_v2 
import types - def test_build_updated_request_full_table(self): - from google.cloud.bigtable_v2.types import RowRange + last_scanned_key = b"row_key14" - last_scanned_key = b"row_key14" + request = _ReadRowsRequestPB(table_name=TABLE_NAME) + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) - request = _ReadRowsRequestPB(table_name=self.table_name) - request_manager = self._make_one(request, last_scanned_key, 2) + result = request_manager.build_updated_request() + expected_result = _ReadRowsRequestPB(table_name=TABLE_NAME, filter={}) + row_range1 = types.RowRange(start_key_open=last_scanned_key) + expected_result.rows.row_ranges.append(row_range1) + assert expected_result == result - result = request_manager.build_updated_request() - expected_result = _ReadRowsRequestPB(table_name=self.table_name, filter={}) - row_range1 = RowRange(start_key_open=last_scanned_key) - expected_result.rows.row_ranges.append(row_range1) - self.assertEqual(expected_result, result) - def test_build_updated_request_no_start_key(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable_v2.types import RowRange +def test_RRRM_build_updated_request_no_start_key(): + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable_v2 import types - row_filter = RowSampleFilter(0.33) - last_scanned_key = b"row_key25" - request = _ReadRowsRequestPB( - filter=row_filter.to_pb(), rows_limit=8, table_name=self.table_name - ) - row_range1 = RowRange(end_key_open=b"row_key29") - request.rows.row_ranges.append(row_range1) + row_filter = RowSampleFilter(0.33) + last_scanned_key = b"row_key25" + request = _ReadRowsRequestPB( + filter=row_filter.to_pb(), rows_limit=8, table_name=TABLE_NAME + ) + row_range1 = types.RowRange(end_key_open=b"row_key29") + request.rows.row_ranges.append(row_range1) - request_manager = self._make_one(request, last_scanned_key, 2) + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) - result = request_manager.build_updated_request() + result = request_manager.build_updated_request() - expected_result = _ReadRowsRequestPB( - table_name=self.table_name, filter=row_filter.to_pb(), rows_limit=6 - ) + expected_result = _ReadRowsRequestPB( + table_name=TABLE_NAME, filter=row_filter.to_pb(), rows_limit=6 + ) - row_range2 = RowRange( - start_key_open=last_scanned_key, end_key_open=b"row_key29" - ) - expected_result.rows.row_ranges.append(row_range2) + row_range2 = types.RowRange( + start_key_open=last_scanned_key, end_key_open=b"row_key29" + ) + expected_result.rows.row_ranges.append(row_range2) - self.assertEqual(expected_result, result) + assert expected_result == result - def test_build_updated_request_no_end_key(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable_v2.types import RowRange - row_filter = RowSampleFilter(0.33) - last_scanned_key = b"row_key25" - request = _ReadRowsRequestPB( - filter=row_filter.to_pb(), rows_limit=8, table_name=self.table_name - ) +def test_RRRM_build_updated_request_no_end_key(): + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable_v2 import types - row_range1 = RowRange(start_key_closed=b"row_key20") - request.rows.row_ranges.append(row_range1) + row_filter = RowSampleFilter(0.33) + last_scanned_key = b"row_key25" + request = _ReadRowsRequestPB( + filter=row_filter.to_pb(), rows_limit=8, table_name=TABLE_NAME + ) - request_manager = self._make_one(request, 
last_scanned_key, 2) + row_range1 = types.RowRange(start_key_closed=b"row_key20") + request.rows.row_ranges.append(row_range1) - result = request_manager.build_updated_request() + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2) - expected_result = _ReadRowsRequestPB( - table_name=self.table_name, filter=row_filter.to_pb(), rows_limit=6 - ) - row_range2 = RowRange(start_key_open=last_scanned_key) - expected_result.rows.row_ranges.append(row_range2) + result = request_manager.build_updated_request() - self.assertEqual(expected_result, result) + expected_result = _ReadRowsRequestPB( + table_name=TABLE_NAME, filter=row_filter.to_pb(), rows_limit=6 + ) + row_range2 = types.RowRange(start_key_open=last_scanned_key) + expected_result.rows.row_ranges.append(row_range2) - def test_build_updated_request_rows(self): - from google.cloud.bigtable.row_filters import RowSampleFilter + assert expected_result == result - row_filter = RowSampleFilter(0.33) - last_scanned_key = b"row_key4" - request = _ReadRowsRequestPB( - filter=row_filter.to_pb(), rows_limit=5, table_name=self.table_name - ) - request.rows.row_keys.extend( - [ - b"row_key1", - b"row_key2", - b"row_key4", - b"row_key5", - b"row_key7", - b"row_key9", - ] - ) - request_manager = self._make_one(request, last_scanned_key, 3) +def test_RRRM_build_updated_request_rows(): + from google.cloud.bigtable.row_filters import RowSampleFilter - result = request_manager.build_updated_request() + row_filter = RowSampleFilter(0.33) + last_scanned_key = b"row_key4" + request = _ReadRowsRequestPB( + filter=row_filter.to_pb(), rows_limit=5, table_name=TABLE_NAME + ) + request.rows.row_keys.extend( + [b"row_key1", b"row_key2", b"row_key4", b"row_key5", b"row_key7", b"row_key9"] + ) - expected_result = _ReadRowsRequestPB( - table_name=self.table_name, filter=row_filter.to_pb(), rows_limit=2 - ) - expected_result.rows.row_keys.extend([b"row_key5", b"row_key7", b"row_key9"]) + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 3) - self.assertEqual(expected_result, result) + result = request_manager.build_updated_request() - def test_build_updated_request_rows_limit(self): - from google.cloud.bigtable_v2.types import RowRange + expected_result = _ReadRowsRequestPB( + table_name=TABLE_NAME, filter=row_filter.to_pb(), rows_limit=2 + ) + expected_result.rows.row_keys.extend([b"row_key5", b"row_key7", b"row_key9"]) - last_scanned_key = b"row_key14" + assert expected_result == result - request = _ReadRowsRequestPB(table_name=self.table_name, rows_limit=10) - request_manager = self._make_one(request, last_scanned_key, 2) - result = request_manager.build_updated_request() - expected_result = _ReadRowsRequestPB( - table_name=self.table_name, filter={}, rows_limit=8 - ) - row_range1 = RowRange(start_key_open=last_scanned_key) - expected_result.rows.row_ranges.append(row_range1) - self.assertEqual(expected_result, result) +def test_RRRM_build_updated_request_rows_limit(): + from google.cloud.bigtable_v2 import types - def test__key_already_read(self): - last_scanned_key = b"row_key14" - request = _ReadRowsRequestPB(table_name=self.table_name) - request_manager = self._make_one(request, last_scanned_key, 2) + last_scanned_key = b"row_key14" - self.assertTrue(request_manager._key_already_read(b"row_key11")) - self.assertFalse(request_manager._key_already_read(b"row_key16")) + request = _ReadRowsRequestPB(table_name=TABLE_NAME, rows_limit=10) + request_manager = _make_read_rows_request_manager(request, last_scanned_key, 
2)
+ result = request_manager.build_updated_request()
+ expected_result = _ReadRowsRequestPB(table_name=TABLE_NAME, filter={}, rows_limit=8)
+ row_range1 = types.RowRange(start_key_open=last_scanned_key)
+ expected_result.rows.row_ranges.append(row_range1)
+ assert expected_result == result


-class TestPartialRowsData_JSON_acceptance_tests(unittest.TestCase):
- _json_tests = None

+def test_RRRM__key_already_read():
+ last_scanned_key = b"row_key14"
+ request = _ReadRowsRequestPB(table_name=TABLE_NAME)
+ request_manager = _make_read_rows_request_manager(request, last_scanned_key, 2)

- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_data import PartialRowsData
+ assert request_manager._key_already_read(b"row_key11")
+ assert not request_manager._key_already_read(b"row_key16")

- return PartialRowsData

- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)

+@pytest.fixture(scope="session")
+def json_tests():
+ import os
+
+ dirname = os.path.dirname(__file__)
+ filename = os.path.join(dirname, "read-rows-acceptance-test.json")
+ raw = _parse_readrows_acceptance_tests(filename)
+ tests = {}
+ for (name, chunks, results) in raw:
+ tests[name] = chunks, results

- def _load_json_test(self, test_name):
- import os

+ yield tests

- if self.__class__._json_tests is None:
- dirname = os.path.dirname(__file__)
- filename = os.path.join(dirname, "read-rows-acceptance-test.json")
- raw = _parse_readrows_acceptance_tests(filename)
- tests = self.__class__._json_tests = {}
- for (name, chunks, results) in raw:
- tests[name] = chunks, results
- return self.__class__._json_tests[test_name]

- # JSON Error cases: invalid chunks

+# JSON Error cases: invalid chunks


- def _fail_during_consume(self, testcase_name):
- from google.cloud.bigtable.row_data import InvalidChunk

- client = _Client()
- chunks, results = self._load_json_test(testcase_name)
- response = _ReadRowsResponseV2(chunks)
- iterator = _MockCancellableIterator(response)
- client._data_stub = mock.MagicMock()
- client._data_stub.ReadRows.side_effect = [iterator]
- request = object()
- prd = self._make_one(client._data_stub.ReadRows, request)
- with self.assertRaises(InvalidChunk):
- prd.consume_all()
- expected_result = self._sort_flattend_cells(
- [result for result in results if not result["error"]]
- )
- flattened = self._sort_flattend_cells(_flatten_cells(prd))
- self.assertEqual(flattened, expected_result)

+def _fail_during_consume(json_tests, testcase_name):
+ from google.cloud.bigtable.row_data import InvalidChunk

- def test_invalid_no_cell_key_before_commit(self):
- self._fail_during_consume("invalid - no cell key before commit")

+ client = _Client()
+ chunks, results = json_tests[testcase_name]
+ response = _ReadRowsResponseV2(chunks)
+ iterator = _MockCancellableIterator(response)
+ client._data_stub = mock.MagicMock()
+ client._data_stub.ReadRows.side_effect = [iterator]
+ request = object()
+ prd = _make_partial_rows_data(client._data_stub.ReadRows, request)
+ with pytest.raises(InvalidChunk):
+ prd.consume_all()
+ expected_result = _sort_flattend_cells(
+ [result for result in results if not result["error"]]
+ )
+ flattened = _sort_flattend_cells(_flatten_cells(prd))
+ assert flattened == expected_result

- def test_invalid_no_cell_key_before_value(self):
- self._fail_during_consume("invalid - no cell key before value")

- def test_invalid_new_col_family_wo_qualifier(self):
- self._fail_during_consume("invalid - new col family must specify qualifier")
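
+# Each test below feeds one named case from read-rows-acceptance-test.json
+# (parsed once per session by the ``json_tests`` fixture) through
+# ``_fail_during_consume``, which asserts that ``consume_all`` raises
+# InvalidChunk and that the cells not flagged as errors still come through.


+def 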
test_prd_json_accept_invalid_no_cell_key_before_commit(json_tests): + _fail_during_consume(json_tests, "invalid - no cell key before commit") - def test_invalid_no_commit_between_rows(self): - self._fail_during_consume("invalid - no commit between rows") - def test_invalid_no_commit_after_first_row(self): - self._fail_during_consume("invalid - no commit after first row") +def test_prd_json_accept_invalid_no_cell_key_before_value(json_tests): + _fail_during_consume(json_tests, "invalid - no cell key before value") - def test_invalid_duplicate_row_key(self): - self._fail_during_consume("invalid - duplicate row key") - def test_invalid_new_row_missing_row_key(self): - self._fail_during_consume("invalid - new row missing row key") +def test_prd_json_accept_invalid_new_col_family_wo_qualifier(json_tests): + _fail_during_consume(json_tests, "invalid - new col family must specify qualifier") - def test_invalid_bare_reset(self): - self._fail_during_consume("invalid - bare reset") - def test_invalid_bad_reset_no_commit(self): - self._fail_during_consume("invalid - bad reset, no commit") +def test_prd_json_accept_invalid_no_commit_between_rows(json_tests): + _fail_during_consume(json_tests, "invalid - no commit between rows") - def test_invalid_missing_key_after_reset(self): - self._fail_during_consume("invalid - missing key after reset") - def test_invalid_reset_with_chunk(self): - self._fail_during_consume("invalid - reset with chunk") +def test_prd_json_accept_invalid_no_commit_after_first_row(json_tests): + _fail_during_consume(json_tests, "invalid - no commit after first row") - def test_invalid_commit_with_chunk(self): - self._fail_during_consume("invalid - commit with chunk") - # JSON Error cases: incomplete final row +def test_prd_json_accept_invalid_duplicate_row_key(json_tests): + _fail_during_consume(json_tests, "invalid - duplicate row key") - def _sort_flattend_cells(self, flattened): - import operator - key_func = operator.itemgetter("rk", "fm", "qual") - return sorted(flattened, key=key_func) +def test_prd_json_accept_invalid_new_row_missing_row_key(json_tests): + _fail_during_consume(json_tests, "invalid - new row missing row key") - def _incomplete_final_row(self, testcase_name): - client = _Client() - chunks, results = self._load_json_test(testcase_name) - response = _ReadRowsResponseV2(chunks) - iterator = _MockCancellableIterator(response) - client._data_stub = mock.MagicMock() - client._data_stub.ReadRows.side_effect = [iterator] - request = object() - prd = self._make_one(client._data_stub.ReadRows, request) - with self.assertRaises(ValueError): - prd.consume_all() - self.assertEqual(prd.state, prd.ROW_IN_PROGRESS) - expected_result = self._sort_flattend_cells( - [result for result in results if not result["error"]] - ) - flattened = self._sort_flattend_cells(_flatten_cells(prd)) - self.assertEqual(flattened, expected_result) - def test_invalid_no_commit(self): - self._incomplete_final_row("invalid - no commit") +def test_prd_json_accept_invalid_bare_reset(json_tests): + _fail_during_consume(json_tests, "invalid - bare reset") - def test_invalid_last_row_missing_commit(self): - self._incomplete_final_row("invalid - last row missing commit") - # Non-error cases +def test_prd_json_accept_invalid_bad_reset_no_commit(json_tests): + _fail_during_consume(json_tests, "invalid - bad reset, no commit") - _marker = object() - def _match_results(self, testcase_name, expected_result=_marker): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient +def 
test_prd_json_accept_invalid_missing_key_after_reset(json_tests): + _fail_during_consume(json_tests, "invalid - missing key after reset") - client = _Client() - chunks, results = self._load_json_test(testcase_name) - response = _ReadRowsResponseV2(chunks) - iterator = _MockCancellableIterator(response) - data_api = mock.create_autospec(BigtableClient) - client._table_data_client = data_api - client._table_data_client.read_rows.side_effect = [iterator] - request = object() - prd = self._make_one(client._table_data_client.read_rows, request) + +def test_prd_json_accept_invalid_reset_with_chunk(json_tests): + _fail_during_consume(json_tests, "invalid - reset with chunk") + + +def test_prd_json_accept_invalid_commit_with_chunk(json_tests): + _fail_during_consume(json_tests, "invalid - commit with chunk") + + +# JSON Error cases: incomplete final row + + +def _sort_flattend_cells(flattened): + import operator + + key_func = operator.itemgetter("rk", "fm", "qual") + return sorted(flattened, key=key_func) + + +def _incomplete_final_row(json_tests, testcase_name): + client = _Client() + chunks, results = json_tests[testcase_name] + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + client._data_stub = mock.MagicMock() + client._data_stub.ReadRows.side_effect = [iterator] + request = object() + prd = _make_partial_rows_data(client._data_stub.ReadRows, request) + with pytest.raises(ValueError): prd.consume_all() - flattened = self._sort_flattend_cells(_flatten_cells(prd)) - if expected_result is self._marker: - expected_result = self._sort_flattend_cells(results) - self.assertEqual(flattened, expected_result) + assert prd.state == prd.ROW_IN_PROGRESS + expected_result = _sort_flattend_cells( + [result for result in results if not result["error"]] + ) + flattened = _sort_flattend_cells(_flatten_cells(prd)) + assert flattened == expected_result + + +def test_prd_json_accept_invalid_no_commit(json_tests): + _incomplete_final_row(json_tests, "invalid - no commit") - def test_bare_commit_implies_ts_zero(self): - self._match_results("bare commit implies ts=0") - def test_simple_row_with_timestamp(self): - self._match_results("simple row with timestamp") +def test_prd_json_accept_invalid_last_row_missing_commit(json_tests): + _incomplete_final_row(json_tests, "invalid - last row missing commit") - def test_missing_timestamp_implies_ts_zero(self): - self._match_results("missing timestamp, implied ts=0") - def test_empty_cell_value(self): - self._match_results("empty cell value") +# Non-error cases - def test_two_unsplit_cells(self): - self._match_results("two unsplit cells") +_marker = object() - def test_two_qualifiers(self): - self._match_results("two qualifiers") - def test_two_families(self): - self._match_results("two families") +def _match_results(json_tests, testcase_name, expected_result=_marker): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient - def test_with_labels(self): - self._match_results("with labels") + client = _Client() + chunks, results = json_tests[testcase_name] + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + data_api = mock.create_autospec(BigtableClient) + client._table_data_client = data_api + client._table_data_client.read_rows.side_effect = [iterator] + request = object() + prd = _make_partial_rows_data(client._table_data_client.read_rows, request) + prd.consume_all() + flattened = _sort_flattend_cells(_flatten_cells(prd)) + if expected_result is _marker: + expected_result = 
_sort_flattend_cells(results) + assert flattened == expected_result - def test_split_cell_bare_commit(self): - self._match_results("split cell, bare commit") - def test_split_cell(self): - self._match_results("split cell") +def test_prd_json_accept_bare_commit_implies_ts_zero(json_tests): + _match_results(json_tests, "bare commit implies ts=0") - def test_split_four_ways(self): - self._match_results("split four ways") - def test_two_split_cells(self): - self._match_results("two split cells") +def test_prd_json_accept_simple_row_with_timestamp(json_tests): + _match_results(json_tests, "simple row with timestamp") - def test_multi_qualifier_splits(self): - self._match_results("multi-qualifier splits") - def test_multi_qualifier_multi_split(self): - self._match_results("multi-qualifier multi-split") +def test_prd_json_accept_missing_timestamp_implies_ts_zero(json_tests): + _match_results(json_tests, "missing timestamp, implied ts=0") - def test_multi_family_split(self): - self._match_results("multi-family split") - def test_two_rows(self): - self._match_results("two rows") +def test_prd_json_accept_empty_cell_value(json_tests): + _match_results(json_tests, "empty cell value") - def test_two_rows_implicit_timestamp(self): - self._match_results("two rows implicit timestamp") - def test_two_rows_empty_value(self): - self._match_results("two rows empty value") +def test_prd_json_accept_two_unsplit_cells(json_tests): + _match_results(json_tests, "two unsplit cells") - def test_two_rows_one_with_multiple_cells(self): - self._match_results("two rows, one with multiple cells") - def test_two_rows_multiple_cells_multiple_families(self): - self._match_results("two rows, multiple cells, multiple families") +def test_prd_json_accept_two_qualifiers(json_tests): + _match_results(json_tests, "two qualifiers") - def test_two_rows_multiple_cells(self): - self._match_results("two rows, multiple cells") - def test_two_rows_four_cells_two_labels(self): - self._match_results("two rows, four cells, 2 labels") +def test_prd_json_accept_two_families(json_tests): + _match_results(json_tests, "two families") - def test_two_rows_with_splits_same_timestamp(self): - self._match_results("two rows with splits, same timestamp") - def test_no_data_after_reset(self): - # JSON testcase has `"results": null` - self._match_results("no data after reset", expected_result=[]) +def test_prd_json_accept_with_labels(json_tests): + _match_results(json_tests, "with labels") - def test_simple_reset(self): - self._match_results("simple reset") - def test_reset_to_new_val(self): - self._match_results("reset to new val") +def test_prd_json_accept_split_cell_bare_commit(json_tests): + _match_results(json_tests, "split cell, bare commit") - def test_reset_to_new_qual(self): - self._match_results("reset to new qual") - def test_reset_with_splits(self): - self._match_results("reset with splits") +def test_prd_json_accept_split_cell(json_tests): + _match_results(json_tests, "split cell") - def test_two_resets(self): - self._match_results("two resets") - def test_reset_to_new_row(self): - self._match_results("reset to new row") +def test_prd_json_accept_split_four_ways(json_tests): + _match_results(json_tests, "split four ways") - def test_reset_in_between_chunks(self): - self._match_results("reset in between chunks") - def test_empty_cell_chunk(self): - self._match_results("empty cell chunk") +def test_prd_json_accept_two_split_cells(json_tests): + _match_results(json_tests, "two split cells") - def test_empty_second_qualifier(self): - 
self._match_results("empty second qualifier") + +def test_prd_json_accept_multi_qualifier_splits(json_tests): + _match_results(json_tests, "multi-qualifier splits") + + +def test_prd_json_accept_multi_qualifier_multi_split(json_tests): + _match_results(json_tests, "multi-qualifier multi-split") + + +def test_prd_json_accept_multi_family_split(json_tests): + _match_results(json_tests, "multi-family split") + + +def test_prd_json_accept_two_rows(json_tests): + _match_results(json_tests, "two rows") + + +def test_prd_json_accept_two_rows_implicit_timestamp(json_tests): + _match_results(json_tests, "two rows implicit timestamp") + + +def test_prd_json_accept_two_rows_empty_value(json_tests): + _match_results(json_tests, "two rows empty value") + + +def test_prd_json_accept_two_rows_one_with_multiple_cells(json_tests): + _match_results(json_tests, "two rows, one with multiple cells") + + +def test_prd_json_accept_two_rows_multiple_cells_multiple_families(json_tests): + _match_results(json_tests, "two rows, multiple cells, multiple families") + + +def test_prd_json_accept_two_rows_multiple_cells(json_tests): + _match_results(json_tests, "two rows, multiple cells") + + +def test_prd_json_accept_two_rows_four_cells_two_labels(json_tests): + _match_results(json_tests, "two rows, four cells, 2 labels") + + +def test_prd_json_accept_two_rows_with_splits_same_timestamp(json_tests): + _match_results(json_tests, "two rows with splits, same timestamp") + + +def test_prd_json_accept_no_data_after_reset(json_tests): + # JSON testcase has `"results": null` + _match_results(json_tests, "no data after reset", expected_result=[]) + + +def test_prd_json_accept_simple_reset(json_tests): + _match_results(json_tests, "simple reset") + + +def test_prd_json_accept_reset_to_new_val(json_tests): + _match_results(json_tests, "reset to new val") + + +def test_prd_json_accept_reset_to_new_qual(json_tests): + _match_results(json_tests, "reset to new qual") + + +def test_prd_json_accept_reset_with_splits(json_tests): + _match_results(json_tests, "reset with splits") + + +def test_prd_json_accept_two_resets(json_tests): + _match_results(json_tests, "two resets") + + +def test_prd_json_accept_reset_to_new_row(json_tests): + _match_results(json_tests, "reset to new row") + + +def test_prd_json_accept_reset_in_between_chunks(json_tests): + _match_results(json_tests, "reset in between chunks") + + +def test_prd_json_accept_empty_cell_chunk(json_tests): + _match_results(json_tests, "empty cell chunk") + + +def test_prd_json_accept_empty_second_qualifier(json_tests): + _match_results(json_tests, "empty second qualifier") def _flatten_cells(prd): @@ -1268,6 +1344,8 @@ def next(self): class _MockFailureIterator_1(object): def next(self): + from google.api_core.exceptions import DeadlineExceeded + raise DeadlineExceeded("Failed to read from server") __next__ = next @@ -1340,10 +1418,10 @@ def _ReadRowsResponseCellChunkPB(*args, **kw): return message -def _make_cell(value): +def _make_cell_pb(value): from google.cloud.bigtable import row_data - return row_data.Cell(value, TestCell.timestamp_micros) + return row_data.Cell(value, TIMESTAMP_MICROS) def _ReadRowsRequestPB(*args, **kw): @@ -1353,4 +1431,11 @@ def _ReadRowsRequestPB(*args, **kw): def _read_rows_retry_exception(exc): + from google.api_core.exceptions import DeadlineExceeded + return isinstance(exc, DeadlineExceeded) + + +class _Client(object): + + data_stub = None From 72b3cf23d747ec8e36949a0d3791a505627b5120 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 21 Oct 2021 
12:24:31 -0400 Subject: [PATCH 13/16] tests: refactor 'row_filters' unit tests w/ pytest idioms --- tests/unit/test_row_filters.py | 1805 ++++++++++++++++---------------- 1 file changed, 880 insertions(+), 925 deletions(-) diff --git a/tests/unit/test_row_filters.py b/tests/unit/test_row_filters.py index c42345ee0..1fc3110ef 100644 --- a/tests/unit/test_row_filters.py +++ b/tests/unit/test_row_filters.py @@ -13,1047 +13,1002 @@ # limitations under the License. -import unittest +import pytest -class Test_BoolFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import _BoolFilter +def test_bool_filter_constructor(): + from google.cloud.bigtable.row_filters import _BoolFilter - return _BoolFilter + flag = object() + row_filter = _BoolFilter(flag) + assert row_filter.flag is flag - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - def test_constructor(self): - flag = object() - row_filter = self._make_one(flag) - self.assertIs(row_filter.flag, flag) +def test_bool_filter___eq__type_differ(): + from google.cloud.bigtable.row_filters import _BoolFilter - def test___eq__type_differ(self): - flag = object() - row_filter1 = self._make_one(flag) - row_filter2 = object() - self.assertNotEqual(row_filter1, row_filter2) + flag = object() + row_filter1 = _BoolFilter(flag) + row_filter2 = object() + assert not (row_filter1 == row_filter2) - def test___eq__same_value(self): - flag = object() - row_filter1 = self._make_one(flag) - row_filter2 = self._make_one(flag) - self.assertEqual(row_filter1, row_filter2) - def test___ne__same_value(self): - flag = object() - row_filter1 = self._make_one(flag) - row_filter2 = self._make_one(flag) - comparison_val = row_filter1 != row_filter2 - self.assertFalse(comparison_val) +def test_bool_filter___eq__same_value(): + from google.cloud.bigtable.row_filters import _BoolFilter + flag = object() + row_filter1 = _BoolFilter(flag) + row_filter2 = _BoolFilter(flag) + assert row_filter1 == row_filter2 -class TestSinkFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import SinkFilter - return SinkFilter +def test_bool_filter___ne__same_value(): + from google.cloud.bigtable.row_filters import _BoolFilter - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) + flag = object() + row_filter1 = _BoolFilter(flag) + row_filter2 = _BoolFilter(flag) + assert not (row_filter1 != row_filter2) - def test_to_pb(self): - flag = True - row_filter = self._make_one(flag) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(sink=flag) - self.assertEqual(pb_val, expected_pb) +def test_sink_filter_to_pb(): + from google.cloud.bigtable.row_filters import SinkFilter -class TestPassAllFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import PassAllFilter + flag = True + row_filter = SinkFilter(flag) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(sink=flag) + assert pb_val == expected_pb - return PassAllFilter - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) +def test_pass_all_filter_to_pb(): + from google.cloud.bigtable.row_filters import PassAllFilter - def test_to_pb(self): - flag = True - row_filter = self._make_one(flag) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(pass_all_filter=flag) - self.assertEqual(pb_val, expected_pb) + flag = True + row_filter = 
PassAllFilter(flag)
+ pb_val = row_filter.to_pb()
+ expected_pb = _RowFilterPB(pass_all_filter=flag)
+ assert pb_val == expected_pb


-class TestBlockAllFilter(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_filters import BlockAllFilter
+def test_block_all_filter_to_pb():
+ from google.cloud.bigtable.row_filters import BlockAllFilter

- return BlockAllFilter
+ flag = True
+ row_filter = BlockAllFilter(flag)
+ pb_val = row_filter.to_pb()
+ expected_pb = _RowFilterPB(block_all_filter=flag)
+ assert pb_val == expected_pb

- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)

- def test_to_pb(self):
- flag = True
- row_filter = self._make_one(flag)
- pb_val = row_filter.to_pb()
- expected_pb = _RowFilterPB(block_all_filter=flag)
- self.assertEqual(pb_val, expected_pb)
+def test_regex_filter_constructor():
+ from google.cloud.bigtable.row_filters import _RegexFilter

+ regex = b"abc"
+ row_filter = _RegexFilter(regex)
+ assert row_filter.regex is regex


-class Test_RegexFilter(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_filters import _RegexFilter

- return _RegexFilter
+def test_regex_filter_constructor_non_bytes():
+ from google.cloud.bigtable.row_filters import _RegexFilter

- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)
+ regex = u"abc"
+ row_filter = _RegexFilter(regex)
+ assert row_filter.regex == b"abc"

- def test_constructor(self):
- regex = b"abc"
- row_filter = self._make_one(regex)
- self.assertIs(row_filter.regex, regex)

- def test_constructor_non_bytes(self):
- regex = u"abc"
- row_filter = self._make_one(regex)
- self.assertEqual(row_filter.regex, b"abc")
+def test_regex_filter___eq__type_differ():
+ from google.cloud.bigtable.row_filters import _RegexFilter

- def test___eq__type_differ(self):
- regex = b"def-rgx"
- row_filter1 = self._make_one(regex)
- row_filter2 = object()
- self.assertNotEqual(row_filter1, row_filter2)
+ regex = b"def-rgx"
+ row_filter1 = _RegexFilter(regex)
+ row_filter2 = object()
+ assert not (row_filter1 == row_filter2)

- def test___eq__same_value(self):
- regex = b"trex-regex"
- row_filter1 = self._make_one(regex)
- row_filter2 = self._make_one(regex)
- self.assertEqual(row_filter1, row_filter2)

- def test___ne__same_value(self):
- regex = b"abc"
- row_filter1 = self._make_one(regex)
- row_filter2 = self._make_one(regex)
- comparison_val = row_filter1 != row_filter2
- self.assertFalse(comparison_val)
+def test_regex_filter___eq__same_value():
+ from google.cloud.bigtable.row_filters import _RegexFilter

+ regex = b"trex-regex"
+ row_filter1 = _RegexFilter(regex)
+ row_filter2 = _RegexFilter(regex)
+ assert row_filter1 == row_filter2


-class TestRowKeyRegexFilter(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_filters import RowKeyRegexFilter
+def test_regex_filter___ne__same_value():
+ from google.cloud.bigtable.row_filters import _RegexFilter

- return RowKeyRegexFilter
+ regex = b"abc"
+ row_filter1 = _RegexFilter(regex)
+ row_filter2 = _RegexFilter(regex)
+ assert not (row_filter1 != row_filter2)

- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)

- def test_to_pb(self):
- regex = b"row-key-regex"
- row_filter = self._make_one(regex)
- pb_val = row_filter.to_pb()
- expected_pb = _RowFilterPB(row_key_regex_filter=regex)
- self.assertEqual(pb_val, expected_pb)
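
+# NOTE: as the two constructor tests above show, _RegexFilter always stores
+# a bytes pattern: a bytes regex is kept as-is, a text regex is encoded.
-
-
-class 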
TestRowSampleFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import RowSampleFilter - return RowSampleFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - sample = object() - row_filter = self._make_one(sample) - self.assertIs(row_filter.sample, sample) - - def test___eq__type_differ(self): - sample = object() - row_filter1 = self._make_one(sample) - row_filter2 = object() - self.assertNotEqual(row_filter1, row_filter2) - - def test___eq__same_value(self): - sample = object() - row_filter1 = self._make_one(sample) - row_filter2 = self._make_one(sample) - self.assertEqual(row_filter1, row_filter2) - - def test_to_pb(self): - sample = 0.25 - row_filter = self._make_one(sample) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(row_sample_filter=sample) - self.assertEqual(pb_val, expected_pb) - - -class TestFamilyNameRegexFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import FamilyNameRegexFilter - - return FamilyNameRegexFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_to_pb(self): - regex = u"family-regex" - row_filter = self._make_one(regex) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(family_name_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - -class TestColumnQualifierRegexFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import ColumnQualifierRegexFilter - - return ColumnQualifierRegexFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_to_pb(self): - regex = b"column-regex" - row_filter = self._make_one(regex) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(column_qualifier_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - -class TestTimestampRange(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import TimestampRange - - return TimestampRange - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - start = object() - end = object() - time_range = self._make_one(start=start, end=end) - self.assertIs(time_range.start, start) - self.assertIs(time_range.end, end) - - def test___eq__(self): - start = object() - end = object() - time_range1 = self._make_one(start=start, end=end) - time_range2 = self._make_one(start=start, end=end) - self.assertEqual(time_range1, time_range2) - - def test___eq__type_differ(self): - start = object() - end = object() - time_range1 = self._make_one(start=start, end=end) - time_range2 = object() - self.assertNotEqual(time_range1, time_range2) - - def test___ne__same_value(self): - start = object() - end = object() - time_range1 = self._make_one(start=start, end=end) - time_range2 = self._make_one(start=start, end=end) - comparison_val = time_range1 != time_range2 - self.assertFalse(comparison_val) - - def _to_pb_helper(self, pb_kwargs, start=None, end=None): - import datetime - from google.cloud._helpers import _EPOCH - - if start is not None: - start = _EPOCH + datetime.timedelta(microseconds=start) - if end is not None: - end = _EPOCH + datetime.timedelta(microseconds=end) - time_range = self._make_one(start=start, end=end) - expected_pb = _TimestampRangePB(**pb_kwargs) - time_pb = 
time_range.to_pb() - self.assertEqual( - time_pb.start_timestamp_micros, expected_pb.start_timestamp_micros - ) - self.assertEqual(time_pb.end_timestamp_micros, expected_pb.end_timestamp_micros) - self.assertEqual(time_pb, expected_pb) - - def test_to_pb(self): - start_micros = 30871234 - end_micros = 12939371234 - start_millis = start_micros // 1000 * 1000 - self.assertEqual(start_millis, 30871000) - end_millis = end_micros // 1000 * 1000 + 1000 - self.assertEqual(end_millis, 12939372000) - pb_kwargs = {} - pb_kwargs["start_timestamp_micros"] = start_millis - pb_kwargs["end_timestamp_micros"] = end_millis - self._to_pb_helper(pb_kwargs, start=start_micros, end=end_micros) - - def test_to_pb_start_only(self): - # Makes sure already milliseconds granularity - start_micros = 30871000 - start_millis = start_micros // 1000 * 1000 - self.assertEqual(start_millis, 30871000) - pb_kwargs = {} - pb_kwargs["start_timestamp_micros"] = start_millis - self._to_pb_helper(pb_kwargs, start=start_micros, end=None) - - def test_to_pb_end_only(self): - # Makes sure already milliseconds granularity - end_micros = 12939371000 - end_millis = end_micros // 1000 * 1000 - self.assertEqual(end_millis, 12939371000) - pb_kwargs = {} - pb_kwargs["end_timestamp_micros"] = end_millis - self._to_pb_helper(pb_kwargs, start=None, end=end_micros) - - -class TestTimestampRangeFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import TimestampRangeFilter - - return TimestampRangeFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - range_ = object() - row_filter = self._make_one(range_) - self.assertIs(row_filter.range_, range_) - - def test___eq__type_differ(self): - range_ = object() - row_filter1 = self._make_one(range_) - row_filter2 = object() - self.assertNotEqual(row_filter1, row_filter2) - - def test___eq__same_value(self): - range_ = object() - row_filter1 = self._make_one(range_) - row_filter2 = self._make_one(range_) - self.assertEqual(row_filter1, row_filter2) - - def test_to_pb(self): - from google.cloud.bigtable.row_filters import TimestampRange - - range_ = TimestampRange() - row_filter = self._make_one(range_) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(timestamp_range_filter=_TimestampRangePB()) - self.assertEqual(pb_val, expected_pb) - - -class TestColumnRangeFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import ColumnRangeFilter - - return ColumnRangeFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor_defaults(self): - column_family_id = object() - row_filter = self._make_one(column_family_id) - self.assertIs(row_filter.column_family_id, column_family_id) - self.assertIsNone(row_filter.start_column) - self.assertIsNone(row_filter.end_column) - self.assertTrue(row_filter.inclusive_start) - self.assertTrue(row_filter.inclusive_end) - - def test_constructor_explicit(self): - column_family_id = object() - start_column = object() - end_column = object() - inclusive_start = object() - inclusive_end = object() - row_filter = self._make_one( - column_family_id, - start_column=start_column, - end_column=end_column, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end, - ) - self.assertIs(row_filter.column_family_id, column_family_id) - self.assertIs(row_filter.start_column, start_column) - 
self.assertIs(row_filter.end_column, end_column) - self.assertIs(row_filter.inclusive_start, inclusive_start) - self.assertIs(row_filter.inclusive_end, inclusive_end) - - def test_constructor_bad_start(self): - column_family_id = object() - self.assertRaises( - ValueError, self._make_one, column_family_id, inclusive_start=True - ) +def test_row_key_regex_filter_to_pb(): + from google.cloud.bigtable.row_filters import RowKeyRegexFilter - def test_constructor_bad_end(self): - column_family_id = object() - self.assertRaises( - ValueError, self._make_one, column_family_id, inclusive_end=True - ) + regex = b"row-key-regex" + row_filter = RowKeyRegexFilter(regex) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(row_key_regex_filter=regex) + assert pb_val == expected_pb - def test___eq__(self): - column_family_id = object() - start_column = object() - end_column = object() - inclusive_start = object() - inclusive_end = object() - row_filter1 = self._make_one( - column_family_id, - start_column=start_column, - end_column=end_column, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end, - ) - row_filter2 = self._make_one( - column_family_id, - start_column=start_column, - end_column=end_column, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end, - ) - self.assertEqual(row_filter1, row_filter2) - - def test___eq__type_differ(self): - column_family_id = object() - row_filter1 = self._make_one(column_family_id) - row_filter2 = object() - self.assertNotEqual(row_filter1, row_filter2) - - def test_to_pb(self): - column_family_id = u"column-family-id" - row_filter = self._make_one(column_family_id) - col_range_pb = _ColumnRangePB(family_name=column_family_id) - expected_pb = _RowFilterPB(column_range_filter=col_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_inclusive_start(self): - column_family_id = u"column-family-id" - column = b"column" - row_filter = self._make_one(column_family_id, start_column=column) - col_range_pb = _ColumnRangePB( - family_name=column_family_id, start_qualifier_closed=column - ) - expected_pb = _RowFilterPB(column_range_filter=col_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_exclusive_start(self): - column_family_id = u"column-family-id" - column = b"column" - row_filter = self._make_one( - column_family_id, start_column=column, inclusive_start=False - ) - col_range_pb = _ColumnRangePB( - family_name=column_family_id, start_qualifier_open=column - ) - expected_pb = _RowFilterPB(column_range_filter=col_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_inclusive_end(self): - column_family_id = u"column-family-id" - column = b"column" - row_filter = self._make_one(column_family_id, end_column=column) - col_range_pb = _ColumnRangePB( - family_name=column_family_id, end_qualifier_closed=column - ) - expected_pb = _RowFilterPB(column_range_filter=col_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_exclusive_end(self): - column_family_id = u"column-family-id" - column = b"column" - row_filter = self._make_one( - column_family_id, end_column=column, inclusive_end=False - ) - col_range_pb = _ColumnRangePB( - family_name=column_family_id, end_qualifier_open=column - ) - expected_pb = _RowFilterPB(column_range_filter=col_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - -class TestValueRegexFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import 
ValueRegexFilter - - return ValueRegexFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_to_pb_w_bytes(self): - value = regex = b"value-regex" - row_filter = self._make_one(value) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(value_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - def test_to_pb_w_str(self): - value = u"value-regex" - regex = value.encode("ascii") - row_filter = self._make_one(value) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(value_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - -class TestExactValueFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import ExactValueFilter - - return ExactValueFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_to_pb_w_bytes(self): - value = regex = b"value-regex" - row_filter = self._make_one(value) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(value_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - def test_to_pb_w_str(self): - value = u"value-regex" - regex = value.encode("ascii") - row_filter = self._make_one(value) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(value_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - def test_to_pb_w_int(self): - import struct - - value = 1 - regex = struct.Struct(">q").pack(value) - row_filter = self._make_one(value) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(value_regex_filter=regex) - self.assertEqual(pb_val, expected_pb) - - -class TestValueRangeFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import ValueRangeFilter - - return ValueRangeFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor_defaults(self): - row_filter = self._make_one() - - self.assertIsNone(row_filter.start_value) - self.assertIsNone(row_filter.end_value) - self.assertTrue(row_filter.inclusive_start) - self.assertTrue(row_filter.inclusive_end) - - def test_constructor_explicit(self): - start_value = object() - end_value = object() - inclusive_start = object() - inclusive_end = object() - - row_filter = self._make_one( - start_value=start_value, - end_value=end_value, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end, - ) - self.assertIs(row_filter.start_value, start_value) - self.assertIs(row_filter.end_value, end_value) - self.assertIs(row_filter.inclusive_start, inclusive_start) - self.assertIs(row_filter.inclusive_end, inclusive_end) - - def test_constructor_w_int_values(self): - import struct - - start_value = 1 - end_value = 10 - - row_filter = self._make_one(start_value=start_value, end_value=end_value) - - expected_start_value = struct.Struct(">q").pack(start_value) - expected_end_value = struct.Struct(">q").pack(end_value) - - self.assertEqual(row_filter.start_value, expected_start_value) - self.assertEqual(row_filter.end_value, expected_end_value) - self.assertTrue(row_filter.inclusive_start) - self.assertTrue(row_filter.inclusive_end) - - def test_constructor_bad_start(self): - with self.assertRaises(ValueError): - self._make_one(inclusive_start=True) - - def test_constructor_bad_end(self): - with self.assertRaises(ValueError): - self._make_one(inclusive_end=True) - - def test___eq__(self): - start_value = object() - end_value = object() - inclusive_start = object() - 
inclusive_end = object() - row_filter1 = self._make_one( - start_value=start_value, - end_value=end_value, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end, - ) - row_filter2 = self._make_one( - start_value=start_value, - end_value=end_value, - inclusive_start=inclusive_start, - inclusive_end=inclusive_end, - ) - self.assertEqual(row_filter1, row_filter2) - - def test___eq__type_differ(self): - row_filter1 = self._make_one() - row_filter2 = object() - self.assertNotEqual(row_filter1, row_filter2) - - def test_to_pb(self): - row_filter = self._make_one() - expected_pb = _RowFilterPB(value_range_filter=_ValueRangePB()) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_inclusive_start(self): - value = b"some-value" - row_filter = self._make_one(start_value=value) - val_range_pb = _ValueRangePB(start_value_closed=value) - expected_pb = _RowFilterPB(value_range_filter=val_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_exclusive_start(self): - value = b"some-value" - row_filter = self._make_one(start_value=value, inclusive_start=False) - val_range_pb = _ValueRangePB(start_value_open=value) - expected_pb = _RowFilterPB(value_range_filter=val_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_inclusive_end(self): - value = b"some-value" - row_filter = self._make_one(end_value=value) - val_range_pb = _ValueRangePB(end_value_closed=value) - expected_pb = _RowFilterPB(value_range_filter=val_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - def test_to_pb_exclusive_end(self): - value = b"some-value" - row_filter = self._make_one(end_value=value, inclusive_end=False) - val_range_pb = _ValueRangePB(end_value_open=value) - expected_pb = _RowFilterPB(value_range_filter=val_range_pb) - self.assertEqual(row_filter.to_pb(), expected_pb) - - -class Test_CellCountFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import _CellCountFilter - - return _CellCountFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - num_cells = object() - row_filter = self._make_one(num_cells) - self.assertIs(row_filter.num_cells, num_cells) - - def test___eq__type_differ(self): - num_cells = object() - row_filter1 = self._make_one(num_cells) - row_filter2 = object() - self.assertNotEqual(row_filter1, row_filter2) - - def test___eq__same_value(self): - num_cells = object() - row_filter1 = self._make_one(num_cells) - row_filter2 = self._make_one(num_cells) - self.assertEqual(row_filter1, row_filter2) - - def test___ne__same_value(self): - num_cells = object() - row_filter1 = self._make_one(num_cells) - row_filter2 = self._make_one(num_cells) - comparison_val = row_filter1 != row_filter2 - self.assertFalse(comparison_val) - - -class TestCellsRowOffsetFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import CellsRowOffsetFilter - - return CellsRowOffsetFilter - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_to_pb(self): - num_cells = 76 - row_filter = self._make_one(num_cells) - pb_val = row_filter.to_pb() - expected_pb = _RowFilterPB(cells_per_row_offset_filter=num_cells) - self.assertEqual(pb_val, expected_pb) - - -class TestCellsRowLimitFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import 
CellsRowLimitFilter
-
- return CellsRowLimitFilter
-
- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)
-
- def test_to_pb(self):
- num_cells = 189
- row_filter = self._make_one(num_cells)
- pb_val = row_filter.to_pb()
- expected_pb = _RowFilterPB(cells_per_row_limit_filter=num_cells)
- self.assertEqual(pb_val, expected_pb)
-
-
-class TestCellsColumnLimitFilter(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_filters import CellsColumnLimitFilter
-
- return CellsColumnLimitFilter
-
- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)
+def test_row_sample_filter_constructor():
+ from google.cloud.bigtable.row_filters import RowSampleFilter

- def test_to_pb(self):
- num_cells = 10
- row_filter = self._make_one(num_cells)
- pb_val = row_filter.to_pb()
- expected_pb = _RowFilterPB(cells_per_column_limit_filter=num_cells)
- self.assertEqual(pb_val, expected_pb)
+ sample = object()
+ row_filter = RowSampleFilter(sample)
+ assert row_filter.sample is sample


-class TestStripValueTransformerFilter(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+def test_row_sample_filter___eq__type_differ():
+ from google.cloud.bigtable.row_filters import RowSampleFilter

- return StripValueTransformerFilter
+ sample = object()
+ row_filter1 = RowSampleFilter(sample)
+ row_filter2 = object()
+ assert not (row_filter1 == row_filter2)

- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)

- def test_to_pb(self):
- flag = True
- row_filter = self._make_one(flag)
- pb_val = row_filter.to_pb()
- expected_pb = _RowFilterPB(strip_value_transformer=flag)
- self.assertEqual(pb_val, expected_pb)
+def test_row_sample_filter___eq__same_value():
+ from google.cloud.bigtable.row_filters import RowSampleFilter

+ sample = object()
+ row_filter1 = RowSampleFilter(sample)
+ row_filter2 = RowSampleFilter(sample)
+ assert row_filter1 == row_filter2


-class TestApplyLabelFilter(unittest.TestCase):
- @staticmethod
- def _get_target_class():
- from google.cloud.bigtable.row_filters import ApplyLabelFilter

- return ApplyLabelFilter
+def test_row_sample_filter_to_pb():
+ from google.cloud.bigtable.row_filters import RowSampleFilter

- def _make_one(self, *args, **kwargs):
- return self._get_target_class()(*args, **kwargs)
+ sample = 0.25
+ row_filter = RowSampleFilter(sample)
+ pb_val = row_filter.to_pb()
+ expected_pb = _RowFilterPB(row_sample_filter=sample)
+ assert pb_val == expected_pb

- def test_constructor(self):
- label = object()
- row_filter = self._make_one(label)
- self.assertIs(row_filter.label, label)
+def test_family_name_regex_filter_to_pb():
+ from google.cloud.bigtable.row_filters import FamilyNameRegexFilter

- def test___eq__type_differ(self):
- label = object()
- row_filter1 = self._make_one(label)
- row_filter2 = object()
- self.assertNotEqual(row_filter1, row_filter2)
+ regex = u"family-regex"
+ row_filter = FamilyNameRegexFilter(regex)
+ pb_val = row_filter.to_pb()
+ expected_pb = _RowFilterPB(family_name_regex_filter=regex)
+ assert pb_val == expected_pb

- def test___eq__same_value(self):
- label = object()
- row_filter1 = self._make_one(label)
- row_filter2 = self._make_one(label)
- self.assertEqual(row_filter1, row_filter2)
+def test_column_qualifier_regex_filter_to_pb():
+ from google.cloud.bigtable.row_filters import ColumnQualifierRegexFilter
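+ # to_pb() is expected to wrap the pattern in a RowFilter with its
+ # column_qualifier_regex_filter field set (asserted just below).

- def test_to_pb(self):
- label = u"label"
- row_filter = self._make_one(label)
- pb_val = row_filter.to_pb()
- expected_pb = 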
_RowFilterPB(apply_label_transformer=label)
-        self.assertEqual(pb_val, expected_pb)
+def test_column_qualifier_regex_filter_to_pb():
+    from google.cloud.bigtable.row_filters import ColumnQualifierRegexFilter
-class Test_FilterCombination(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.row_filters import _FilterCombination
+    regex = b"column-regex"
+    row_filter = ColumnQualifierRegexFilter(regex)
+    pb_val = row_filter.to_pb()
+    expected_pb = _RowFilterPB(column_qualifier_regex_filter=regex)
+    assert pb_val == expected_pb
-        return _FilterCombination
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
+def test_timestamp_range_constructor():
+    from google.cloud.bigtable.row_filters import TimestampRange
-    def test_constructor_defaults(self):
-        row_filter = self._make_one()
-        self.assertEqual(row_filter.filters, [])
+    start = object()
+    end = object()
+    time_range = TimestampRange(start=start, end=end)
+    assert time_range.start is start
+    assert time_range.end is end
-    def test_constructor_explicit(self):
-        filters = object()
-        row_filter = self._make_one(filters=filters)
-        self.assertIs(row_filter.filters, filters)
-    def test___eq__(self):
-        filters = object()
-        row_filter1 = self._make_one(filters=filters)
-        row_filter2 = self._make_one(filters=filters)
-        self.assertEqual(row_filter1, row_filter2)
+def test_timestamp_range___eq__():
+    from google.cloud.bigtable.row_filters import TimestampRange
-    def test___eq__type_differ(self):
-        filters = object()
-        row_filter1 = self._make_one(filters=filters)
-        row_filter2 = object()
-        self.assertNotEqual(row_filter1, row_filter2)
+    start = object()
+    end = object()
+    time_range1 = TimestampRange(start=start, end=end)
+    time_range2 = TimestampRange(start=start, end=end)
+    assert time_range1 == time_range2
-class TestRowFilterChain(unittest.TestCase):
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.bigtable.row_filters import RowFilterChain
+def test_timestamp_range___eq__type_differ():
+    from google.cloud.bigtable.row_filters import TimestampRange
-        return RowFilterChain
+    start = object()
+    end = object()
+    time_range1 = TimestampRange(start=start, end=end)
+    time_range2 = object()
+    assert not (time_range1 == time_range2)
-    def _make_one(self, *args, **kwargs):
-        return self._get_target_class()(*args, **kwargs)
-    def test_to_pb(self):
-        from google.cloud.bigtable.row_filters import RowSampleFilter
-        from google.cloud.bigtable.row_filters import StripValueTransformerFilter
+def test_timestamp_range___ne__same_value():
+    from google.cloud.bigtable.row_filters import TimestampRange
-        row_filter1 = StripValueTransformerFilter(True)
-        row_filter1_pb = row_filter1.to_pb()
+    start = object()
+    end = object()
+    time_range1 = TimestampRange(start=start, end=end)
+    time_range2 = TimestampRange(start=start, end=end)
+    assert not (time_range1 != time_range2)
-        row_filter2 = RowSampleFilter(0.25)
-        row_filter2_pb = row_filter2.to_pb()
-        row_filter3 = self._make_one(filters=[row_filter1, row_filter2])
-        filter_pb = row_filter3.to_pb()
+def _timestamp_range_to_pb_helper(pb_kwargs, start=None, end=None):
+    import datetime
+    from google.cloud._helpers import _EPOCH
+    from google.cloud.bigtable.row_filters import TimestampRange
-        expected_pb = _RowFilterPB(
-            chain=_RowFilterChainPB(filters=[row_filter1_pb, row_filter2_pb])
-        )
-        self.assertEqual(filter_pb, expected_pb)
+    if start is not None:
+        start = _EPOCH + datetime.timedelta(microseconds=start)
+    if end is not None:
+ end = _EPOCH + datetime.timedelta(microseconds=end) + time_range = TimestampRange(start=start, end=end) + expected_pb = _TimestampRangePB(**pb_kwargs) + time_pb = time_range.to_pb() + assert time_pb.start_timestamp_micros == expected_pb.start_timestamp_micros + assert time_pb.end_timestamp_micros == expected_pb.end_timestamp_micros + assert time_pb == expected_pb - def test_to_pb_nested(self): - from google.cloud.bigtable.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter - row_filter1 = StripValueTransformerFilter(True) - row_filter2 = RowSampleFilter(0.25) +def test_timestamp_range_to_pb(): + start_micros = 30871234 + end_micros = 12939371234 + start_millis = start_micros // 1000 * 1000 + assert start_millis == 30871000 + end_millis = end_micros // 1000 * 1000 + 1000 + assert end_millis == 12939372000 + pb_kwargs = {} + pb_kwargs["start_timestamp_micros"] = start_millis + pb_kwargs["end_timestamp_micros"] = end_millis + _timestamp_range_to_pb_helper(pb_kwargs, start=start_micros, end=end_micros) - row_filter3 = self._make_one(filters=[row_filter1, row_filter2]) - row_filter3_pb = row_filter3.to_pb() - row_filter4 = CellsRowLimitFilter(11) - row_filter4_pb = row_filter4.to_pb() +def test_timestamp_range_to_pb_start_only(): + # Makes sure already milliseconds granularity + start_micros = 30871000 + start_millis = start_micros // 1000 * 1000 + assert start_millis == 30871000 + pb_kwargs = {} + pb_kwargs["start_timestamp_micros"] = start_millis + _timestamp_range_to_pb_helper(pb_kwargs, start=start_micros, end=None) - row_filter5 = self._make_one(filters=[row_filter3, row_filter4]) - filter_pb = row_filter5.to_pb() - expected_pb = _RowFilterPB( - chain=_RowFilterChainPB(filters=[row_filter3_pb, row_filter4_pb]) - ) - self.assertEqual(filter_pb, expected_pb) +def test_timestamp_range_to_pb_end_only(): + # Makes sure already milliseconds granularity + end_micros = 12939371000 + end_millis = end_micros // 1000 * 1000 + assert end_millis == 12939371000 + pb_kwargs = {} + pb_kwargs["end_timestamp_micros"] = end_millis + _timestamp_range_to_pb_helper(pb_kwargs, start=None, end=end_micros) -class TestRowFilterUnion(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import RowFilterUnion +def test_timestamp_range_filter_constructor(): + from google.cloud.bigtable.row_filters import TimestampRangeFilter + + range_ = object() + row_filter = TimestampRangeFilter(range_) + assert row_filter.range_ is range_ + + +def test_timestamp_range_filter___eq__type_differ(): + from google.cloud.bigtable.row_filters import TimestampRangeFilter + + range_ = object() + row_filter1 = TimestampRangeFilter(range_) + row_filter2 = object() + assert not (row_filter1 == row_filter2) + + +def test_timestamp_range_filter___eq__same_value(): + from google.cloud.bigtable.row_filters import TimestampRangeFilter + + range_ = object() + row_filter1 = TimestampRangeFilter(range_) + row_filter2 = TimestampRangeFilter(range_) + assert row_filter1 == row_filter2 + + +def test_timestamp_range_filter_to_pb(): + from google.cloud.bigtable.row_filters import TimestampRangeFilter + from google.cloud.bigtable.row_filters import TimestampRange + + range_ = TimestampRange() + row_filter = TimestampRangeFilter(range_) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(timestamp_range_filter=_TimestampRangePB()) + assert pb_val == expected_pb + + +def 
test_column_range_filter_constructor_defaults(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = object() + row_filter = ColumnRangeFilter(column_family_id) + assert row_filter.column_family_id is column_family_id + assert row_filter.start_column is None + assert row_filter.end_column is None + assert row_filter.inclusive_start + assert row_filter.inclusive_end + + +def test_column_range_filter_constructor_explicit(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = object() + start_column = object() + end_column = object() + inclusive_start = object() + inclusive_end = object() + row_filter = ColumnRangeFilter( + column_family_id, + start_column=start_column, + end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end, + ) + assert row_filter.column_family_id is column_family_id + assert row_filter.start_column is start_column + assert row_filter.end_column is end_column + assert row_filter.inclusive_start is inclusive_start + assert row_filter.inclusive_end is inclusive_end + + +def test_column_range_filter_constructor_bad_start(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = object() + with pytest.raises(ValueError): + ColumnRangeFilter(column_family_id, inclusive_start=True) + + +def test_column_range_filter_constructor_bad_end(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = object() + with pytest.raises(ValueError): + ColumnRangeFilter(column_family_id, inclusive_end=True) + + +def test_column_range_filter___eq__(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = object() + start_column = object() + end_column = object() + inclusive_start = object() + inclusive_end = object() + row_filter1 = ColumnRangeFilter( + column_family_id, + start_column=start_column, + end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end, + ) + row_filter2 = ColumnRangeFilter( + column_family_id, + start_column=start_column, + end_column=end_column, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end, + ) + assert row_filter1 == row_filter2 + + +def test_column_range_filter___eq__type_differ(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = object() + row_filter1 = ColumnRangeFilter(column_family_id) + row_filter2 = object() + assert not (row_filter1 == row_filter2) + + +def test_column_range_filter_to_pb(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = u"column-family-id" + row_filter = ColumnRangeFilter(column_family_id) + col_range_pb = _ColumnRangePB(family_name=column_family_id) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_column_range_filter_to_pb_inclusive_start(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = u"column-family-id" + column = b"column" + row_filter = ColumnRangeFilter(column_family_id, start_column=column) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, start_qualifier_closed=column + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_column_range_filter_to_pb_exclusive_start(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter + + column_family_id = u"column-family-id" + column = b"column" + row_filter = 
ColumnRangeFilter( + column_family_id, start_column=column, inclusive_start=False + ) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, start_qualifier_open=column + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_column_range_filter_to_pb_inclusive_end(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter - return RowFilterUnion + column_family_id = u"column-family-id" + column = b"column" + row_filter = ColumnRangeFilter(column_family_id, end_column=column) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, end_qualifier_closed=column + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + assert row_filter.to_pb() == expected_pb - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - def test_to_pb(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter +def test_column_range_filter_to_pb_exclusive_end(): + from google.cloud.bigtable.row_filters import ColumnRangeFilter - row_filter1 = StripValueTransformerFilter(True) - row_filter1_pb = row_filter1.to_pb() + column_family_id = u"column-family-id" + column = b"column" + row_filter = ColumnRangeFilter( + column_family_id, end_column=column, inclusive_end=False + ) + col_range_pb = _ColumnRangePB( + family_name=column_family_id, end_qualifier_open=column + ) + expected_pb = _RowFilterPB(column_range_filter=col_range_pb) + assert row_filter.to_pb() == expected_pb - row_filter2 = RowSampleFilter(0.25) - row_filter2_pb = row_filter2.to_pb() - row_filter3 = self._make_one(filters=[row_filter1, row_filter2]) - filter_pb = row_filter3.to_pb() +def test_value_regex_filter_to_pb_w_bytes(): + from google.cloud.bigtable.row_filters import ValueRegexFilter - expected_pb = _RowFilterPB( - interleave=_RowFilterInterleavePB(filters=[row_filter1_pb, row_filter2_pb]) - ) - self.assertEqual(filter_pb, expected_pb) + value = regex = b"value-regex" + row_filter = ValueRegexFilter(value) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(value_regex_filter=regex) + assert pb_val == expected_pb - def test_to_pb_nested(self): - from google.cloud.bigtable.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter - row_filter1 = StripValueTransformerFilter(True) - row_filter2 = RowSampleFilter(0.25) +def test_value_regex_filter_to_pb_w_str(): + from google.cloud.bigtable.row_filters import ValueRegexFilter - row_filter3 = self._make_one(filters=[row_filter1, row_filter2]) - row_filter3_pb = row_filter3.to_pb() + value = u"value-regex" + regex = value.encode("ascii") + row_filter = ValueRegexFilter(value) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(value_regex_filter=regex) + assert pb_val == expected_pb - row_filter4 = CellsRowLimitFilter(11) - row_filter4_pb = row_filter4.to_pb() - row_filter5 = self._make_one(filters=[row_filter3, row_filter4]) - filter_pb = row_filter5.to_pb() +def test_exact_value_filter_to_pb_w_bytes(): + from google.cloud.bigtable.row_filters import ExactValueFilter - expected_pb = _RowFilterPB( - interleave=_RowFilterInterleavePB(filters=[row_filter3_pb, row_filter4_pb]) - ) - self.assertEqual(filter_pb, expected_pb) + value = regex = b"value-regex" + row_filter = ExactValueFilter(value) + pb_val = row_filter.to_pb() + expected_pb = 
_RowFilterPB(value_regex_filter=regex) + assert pb_val == expected_pb -class TestConditionalRowFilter(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_filters import ConditionalRowFilter +def test_exact_value_filter_to_pb_w_str(): + from google.cloud.bigtable.row_filters import ExactValueFilter - return ConditionalRowFilter + value = u"value-regex" + regex = value.encode("ascii") + row_filter = ExactValueFilter(value) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(value_regex_filter=regex) + assert pb_val == expected_pb - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - def test_constructor(self): - base_filter = object() - true_filter = object() - false_filter = object() - cond_filter = self._make_one( - base_filter, true_filter=true_filter, false_filter=false_filter - ) - self.assertIs(cond_filter.base_filter, base_filter) - self.assertIs(cond_filter.true_filter, true_filter) - self.assertIs(cond_filter.false_filter, false_filter) - - def test___eq__(self): - base_filter = object() - true_filter = object() - false_filter = object() - cond_filter1 = self._make_one( - base_filter, true_filter=true_filter, false_filter=false_filter - ) - cond_filter2 = self._make_one( - base_filter, true_filter=true_filter, false_filter=false_filter - ) - self.assertEqual(cond_filter1, cond_filter2) - - def test___eq__type_differ(self): - base_filter = object() - true_filter = object() - false_filter = object() - cond_filter1 = self._make_one( - base_filter, true_filter=true_filter, false_filter=false_filter - ) - cond_filter2 = object() - self.assertNotEqual(cond_filter1, cond_filter2) +def test_exact_value_filter_to_pb_w_int(): + import struct + from google.cloud.bigtable.row_filters import ExactValueFilter - def test_to_pb(self): - from google.cloud.bigtable.row_filters import CellsRowOffsetFilter - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + value = 1 + regex = struct.Struct(">q").pack(value) + row_filter = ExactValueFilter(value) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(value_regex_filter=regex) + assert pb_val == expected_pb - row_filter1 = StripValueTransformerFilter(True) - row_filter1_pb = row_filter1.to_pb() - row_filter2 = RowSampleFilter(0.25) - row_filter2_pb = row_filter2.to_pb() +def test_value_range_filter_constructor_defaults(): + from google.cloud.bigtable.row_filters import ValueRangeFilter - row_filter3 = CellsRowOffsetFilter(11) - row_filter3_pb = row_filter3.to_pb() + row_filter = ValueRangeFilter() - row_filter4 = self._make_one( - row_filter1, true_filter=row_filter2, false_filter=row_filter3 - ) - filter_pb = row_filter4.to_pb() - - expected_pb = _RowFilterPB( - condition=_RowFilterConditionPB( - predicate_filter=row_filter1_pb, - true_filter=row_filter2_pb, - false_filter=row_filter3_pb, - ) + assert row_filter.start_value is None + assert row_filter.end_value is None + assert row_filter.inclusive_start + assert row_filter.inclusive_end + + +def test_value_range_filter_constructor_explicit(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + start_value = object() + end_value = object() + inclusive_start = object() + inclusive_end = object() + + row_filter = ValueRangeFilter( + start_value=start_value, + end_value=end_value, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end, + ) + + assert row_filter.start_value is start_value + assert 
row_filter.end_value is end_value + assert row_filter.inclusive_start is inclusive_start + assert row_filter.inclusive_end is inclusive_end + + +def test_value_range_filter_constructor_w_int_values(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + import struct + + start_value = 1 + end_value = 10 + + row_filter = ValueRangeFilter(start_value=start_value, end_value=end_value) + + expected_start_value = struct.Struct(">q").pack(start_value) + expected_end_value = struct.Struct(">q").pack(end_value) + + assert row_filter.start_value == expected_start_value + assert row_filter.end_value == expected_end_value + assert row_filter.inclusive_start + assert row_filter.inclusive_end + + +def test_value_range_filter_constructor_bad_start(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + with pytest.raises(ValueError): + ValueRangeFilter(inclusive_start=True) + + +def test_value_range_filter_constructor_bad_end(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + with pytest.raises(ValueError): + ValueRangeFilter(inclusive_end=True) + + +def test_value_range_filter___eq__(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + start_value = object() + end_value = object() + inclusive_start = object() + inclusive_end = object() + row_filter1 = ValueRangeFilter( + start_value=start_value, + end_value=end_value, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end, + ) + row_filter2 = ValueRangeFilter( + start_value=start_value, + end_value=end_value, + inclusive_start=inclusive_start, + inclusive_end=inclusive_end, + ) + assert row_filter1 == row_filter2 + + +def test_value_range_filter___eq__type_differ(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + row_filter1 = ValueRangeFilter() + row_filter2 = object() + assert not (row_filter1 == row_filter2) + + +def test_value_range_filter_to_pb(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + row_filter = ValueRangeFilter() + expected_pb = _RowFilterPB(value_range_filter=_ValueRangePB()) + assert row_filter.to_pb() == expected_pb + + +def test_value_range_filter_to_pb_inclusive_start(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + value = b"some-value" + row_filter = ValueRangeFilter(start_value=value) + val_range_pb = _ValueRangePB(start_value_closed=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_value_range_filter_to_pb_exclusive_start(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + value = b"some-value" + row_filter = ValueRangeFilter(start_value=value, inclusive_start=False) + val_range_pb = _ValueRangePB(start_value_open=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_value_range_filter_to_pb_inclusive_end(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + value = b"some-value" + row_filter = ValueRangeFilter(end_value=value) + val_range_pb = _ValueRangePB(end_value_closed=value) + expected_pb = _RowFilterPB(value_range_filter=val_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_value_range_filter_to_pb_exclusive_end(): + from google.cloud.bigtable.row_filters import ValueRangeFilter + + value = b"some-value" + row_filter = ValueRangeFilter(end_value=value, inclusive_end=False) + val_range_pb = _ValueRangePB(end_value_open=value) + expected_pb = 
_RowFilterPB(value_range_filter=val_range_pb) + assert row_filter.to_pb() == expected_pb + + +def test_cell_count_constructor(): + from google.cloud.bigtable.row_filters import _CellCountFilter + + num_cells = object() + row_filter = _CellCountFilter(num_cells) + assert row_filter.num_cells is num_cells + + +def test_cell_count___eq__type_differ(): + from google.cloud.bigtable.row_filters import _CellCountFilter + + num_cells = object() + row_filter1 = _CellCountFilter(num_cells) + row_filter2 = object() + assert not (row_filter1 == row_filter2) + + +def test_cell_count___eq__same_value(): + from google.cloud.bigtable.row_filters import _CellCountFilter + + num_cells = object() + row_filter1 = _CellCountFilter(num_cells) + row_filter2 = _CellCountFilter(num_cells) + assert row_filter1 == row_filter2 + + +def test_cell_count___ne__same_value(): + from google.cloud.bigtable.row_filters import _CellCountFilter + + num_cells = object() + row_filter1 = _CellCountFilter(num_cells) + row_filter2 = _CellCountFilter(num_cells) + assert not (row_filter1 != row_filter2) + + +def test_cells_row_offset_filter_to_pb(): + from google.cloud.bigtable.row_filters import CellsRowOffsetFilter + + num_cells = 76 + row_filter = CellsRowOffsetFilter(num_cells) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(cells_per_row_offset_filter=num_cells) + assert pb_val == expected_pb + + +def test_cells_row_limit_filter_to_pb(): + from google.cloud.bigtable.row_filters import CellsRowLimitFilter + + num_cells = 189 + row_filter = CellsRowLimitFilter(num_cells) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(cells_per_row_limit_filter=num_cells) + assert pb_val == expected_pb + + +def test_cells_column_limit_filter_to_pb(): + from google.cloud.bigtable.row_filters import CellsColumnLimitFilter + + num_cells = 10 + row_filter = CellsColumnLimitFilter(num_cells) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(cells_per_column_limit_filter=num_cells) + assert pb_val == expected_pb + + +def test_strip_value_transformer_filter_to_pb(): + from google.cloud.bigtable.row_filters import StripValueTransformerFilter + + flag = True + row_filter = StripValueTransformerFilter(flag) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(strip_value_transformer=flag) + assert pb_val == expected_pb + + +def test_apply_label_filter_constructor(): + from google.cloud.bigtable.row_filters import ApplyLabelFilter + + label = object() + row_filter = ApplyLabelFilter(label) + assert row_filter.label is label + + +def test_apply_label_filter___eq__type_differ(): + from google.cloud.bigtable.row_filters import ApplyLabelFilter + + label = object() + row_filter1 = ApplyLabelFilter(label) + row_filter2 = object() + assert not (row_filter1 == row_filter2) + + +def test_apply_label_filter___eq__same_value(): + from google.cloud.bigtable.row_filters import ApplyLabelFilter + + label = object() + row_filter1 = ApplyLabelFilter(label) + row_filter2 = ApplyLabelFilter(label) + assert row_filter1 == row_filter2 + + +def test_apply_label_filter_to_pb(): + from google.cloud.bigtable.row_filters import ApplyLabelFilter + + label = u"label" + row_filter = ApplyLabelFilter(label) + pb_val = row_filter.to_pb() + expected_pb = _RowFilterPB(apply_label_transformer=label) + assert pb_val == expected_pb + + +def test_filter_combination_constructor_defaults(): + from google.cloud.bigtable.row_filters import _FilterCombination + + row_filter = _FilterCombination() + assert row_filter.filters == [] + + +def 
test_filter_combination_constructor_explicit(): + from google.cloud.bigtable.row_filters import _FilterCombination + + filters = object() + row_filter = _FilterCombination(filters=filters) + assert row_filter.filters is filters + + +def test_filter_combination___eq__(): + from google.cloud.bigtable.row_filters import _FilterCombination + + filters = object() + row_filter1 = _FilterCombination(filters=filters) + row_filter2 = _FilterCombination(filters=filters) + assert row_filter1 == row_filter2 + + +def test_filter_combination___eq__type_differ(): + from google.cloud.bigtable.row_filters import _FilterCombination + + filters = object() + row_filter1 = _FilterCombination(filters=filters) + row_filter2 = object() + assert not (row_filter1 == row_filter2) + + +def test_row_filter_chain_to_pb(): + from google.cloud.bigtable.row_filters import RowFilterChain + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = RowFilterChain(filters=[row_filter1, row_filter2]) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + chain=_RowFilterChainPB(filters=[row_filter1_pb, row_filter2_pb]) + ) + assert filter_pb == expected_pb + + +def test_row_filter_chain_to_pb_nested(): + from google.cloud.bigtable.row_filters import CellsRowLimitFilter + from google.cloud.bigtable.row_filters import RowFilterChain + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter2 = RowSampleFilter(0.25) + + row_filter3 = RowFilterChain(filters=[row_filter1, row_filter2]) + row_filter3_pb = row_filter3.to_pb() + + row_filter4 = CellsRowLimitFilter(11) + row_filter4_pb = row_filter4.to_pb() + + row_filter5 = RowFilterChain(filters=[row_filter3, row_filter4]) + filter_pb = row_filter5.to_pb() + + expected_pb = _RowFilterPB( + chain=_RowFilterChainPB(filters=[row_filter3_pb, row_filter4_pb]) + ) + assert filter_pb == expected_pb + + +def test_row_filter_union_to_pb(): + from google.cloud.bigtable.row_filters import RowFilterUnion + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = RowFilterUnion(filters=[row_filter1, row_filter2]) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + interleave=_RowFilterInterleavePB(filters=[row_filter1_pb, row_filter2_pb]) + ) + assert filter_pb == expected_pb + + +def test_row_filter_union_to_pb_nested(): + from google.cloud.bigtable.row_filters import CellsRowLimitFilter + from google.cloud.bigtable.row_filters import RowFilterUnion + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter2 = RowSampleFilter(0.25) + + row_filter3 = RowFilterUnion(filters=[row_filter1, row_filter2]) + row_filter3_pb = row_filter3.to_pb() + + row_filter4 = CellsRowLimitFilter(11) + row_filter4_pb = row_filter4.to_pb() + + row_filter5 = 
RowFilterUnion(filters=[row_filter3, row_filter4]) + filter_pb = row_filter5.to_pb() + + expected_pb = _RowFilterPB( + interleave=_RowFilterInterleavePB(filters=[row_filter3_pb, row_filter4_pb]) + ) + assert filter_pb == expected_pb + + +def test_conditional_row_filter_constructor(): + from google.cloud.bigtable.row_filters import ConditionalRowFilter + + base_filter = object() + true_filter = object() + false_filter = object() + cond_filter = ConditionalRowFilter( + base_filter, true_filter=true_filter, false_filter=false_filter + ) + assert cond_filter.base_filter is base_filter + assert cond_filter.true_filter is true_filter + assert cond_filter.false_filter is false_filter + + +def test_conditional_row_filter___eq__(): + from google.cloud.bigtable.row_filters import ConditionalRowFilter + + base_filter = object() + true_filter = object() + false_filter = object() + cond_filter1 = ConditionalRowFilter( + base_filter, true_filter=true_filter, false_filter=false_filter + ) + cond_filter2 = ConditionalRowFilter( + base_filter, true_filter=true_filter, false_filter=false_filter + ) + assert cond_filter1 == cond_filter2 + + +def test_conditional_row_filter___eq__type_differ(): + from google.cloud.bigtable.row_filters import ConditionalRowFilter + + base_filter = object() + true_filter = object() + false_filter = object() + cond_filter1 = ConditionalRowFilter( + base_filter, true_filter=true_filter, false_filter=false_filter + ) + cond_filter2 = object() + assert not (cond_filter1 == cond_filter2) + + +def test_conditional_row_filter_to_pb(): + from google.cloud.bigtable.row_filters import ConditionalRowFilter + from google.cloud.bigtable.row_filters import CellsRowOffsetFilter + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter + + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() + + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() + + row_filter3 = CellsRowOffsetFilter(11) + row_filter3_pb = row_filter3.to_pb() + + row_filter4 = ConditionalRowFilter( + row_filter1, true_filter=row_filter2, false_filter=row_filter3 + ) + filter_pb = row_filter4.to_pb() + + expected_pb = _RowFilterPB( + condition=_RowFilterConditionPB( + predicate_filter=row_filter1_pb, + true_filter=row_filter2_pb, + false_filter=row_filter3_pb, ) - self.assertEqual(filter_pb, expected_pb) + ) + assert filter_pb == expected_pb - def test_to_pb_true_only(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter - row_filter1 = StripValueTransformerFilter(True) - row_filter1_pb = row_filter1.to_pb() +def test_conditional_row_filter_to_pb_true_only(): + from google.cloud.bigtable.row_filters import ConditionalRowFilter + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter - row_filter2 = RowSampleFilter(0.25) - row_filter2_pb = row_filter2.to_pb() + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() - row_filter3 = self._make_one(row_filter1, true_filter=row_filter2) - filter_pb = row_filter3.to_pb() + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() - expected_pb = _RowFilterPB( - condition=_RowFilterConditionPB( - predicate_filter=row_filter1_pb, true_filter=row_filter2_pb - ) + row_filter3 = ConditionalRowFilter(row_filter1, 
true_filter=row_filter2) + filter_pb = row_filter3.to_pb() + + expected_pb = _RowFilterPB( + condition=_RowFilterConditionPB( + predicate_filter=row_filter1_pb, true_filter=row_filter2_pb ) - self.assertEqual(filter_pb, expected_pb) + ) + assert filter_pb == expected_pb + - def test_to_pb_false_only(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter +def test_conditional_row_filter_to_pb_false_only(): + from google.cloud.bigtable.row_filters import ConditionalRowFilter + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_filters import StripValueTransformerFilter - row_filter1 = StripValueTransformerFilter(True) - row_filter1_pb = row_filter1.to_pb() + row_filter1 = StripValueTransformerFilter(True) + row_filter1_pb = row_filter1.to_pb() - row_filter2 = RowSampleFilter(0.25) - row_filter2_pb = row_filter2.to_pb() + row_filter2 = RowSampleFilter(0.25) + row_filter2_pb = row_filter2.to_pb() - row_filter3 = self._make_one(row_filter1, false_filter=row_filter2) - filter_pb = row_filter3.to_pb() + row_filter3 = ConditionalRowFilter(row_filter1, false_filter=row_filter2) + filter_pb = row_filter3.to_pb() - expected_pb = _RowFilterPB( - condition=_RowFilterConditionPB( - predicate_filter=row_filter1_pb, false_filter=row_filter2_pb - ) + expected_pb = _RowFilterPB( + condition=_RowFilterConditionPB( + predicate_filter=row_filter1_pb, false_filter=row_filter2_pb ) - self.assertEqual(filter_pb, expected_pb) + ) + assert filter_pb == expected_pb def _ColumnRangePB(*args, **kw): From eae19732ae4d879c684bf88114cd5c53824448c1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 21 Oct 2021 12:33:39 -0400 Subject: [PATCH 14/16] tests: refactor 'row_set' unit tests w/ pytest idioms --- tests/unit/test_row_set.py | 526 ++++++++++++++++++++----------------- 1 file changed, 287 insertions(+), 239 deletions(-) diff --git a/tests/unit/test_row_set.py b/tests/unit/test_row_set.py index c1fa4ca87..1a33be720 100644 --- a/tests/unit/test_row_set.py +++ b/tests/unit/test_row_set.py @@ -13,260 +13,308 @@ # limitations under the License. 
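Note on the conversion pattern: as in the row-filter patch above, each `unittest.TestCase` method below becomes a module-level pytest function, the `_get_target_class`/`_make_one` indirection becomes a function-local import plus a direct constructor call, and `self.assert*` methods become plain `assert` statements. A minimal runnable sketch of the assertion mapping (the test name and values here are illustrative only, not part of the patch):

import pytest


def test_assertion_mapping_sketch():
    # self.assertEqual(a, b)          ->  assert a == b
    assert [b"row_key1"] == [b"row_key1"]
    # self.assertIs(a, b)             ->  assert a is b
    marker = object()
    assert marker is marker
    # self.assertNotEqual(a, b)       ->  assert not (a == b)
    assert not ([1] == [2])
    # self.assertFalse(a != b)        ->  assert not (a != b)
    assert not ([1] != [1])
    # self.assertRaises(ValueError)   ->  with pytest.raises(ValueError): ...
    with pytest.raises(ValueError):
        int("not-a-number")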
-import unittest -from google.cloud.bigtable.row_set import RowRange -from google.cloud._helpers import _to_bytes +def test_row_set_constructor(): + from google.cloud.bigtable.row_set import RowSet + row_set = RowSet() + assert [] == row_set.row_keys + assert [] == row_set.row_ranges -class TestRowSet(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_set import RowSet - return RowSet +def test_row_set__eq__(): + from google.cloud.bigtable.row_set import RowRange + from google.cloud.bigtable.row_set import RowSet - def _make_one(self): - return self._get_target_class()() + row_key1 = b"row_key1" + row_key2 = b"row_key1" + row_range1 = RowRange(b"row_key4", b"row_key9") + row_range2 = RowRange(b"row_key4", b"row_key9") - def test_constructor(self): - row_set = self._make_one() - self.assertEqual([], row_set.row_keys) - self.assertEqual([], row_set.row_ranges) + row_set1 = RowSet() + row_set2 = RowSet() - def test__eq__(self): - row_key1 = b"row_key1" - row_key2 = b"row_key1" - row_range1 = RowRange(b"row_key4", b"row_key9") - row_range2 = RowRange(b"row_key4", b"row_key9") + row_set1.add_row_key(row_key1) + row_set2.add_row_key(row_key2) + row_set1.add_row_range(row_range1) + row_set2.add_row_range(row_range2) - row_set1 = self._make_one() - row_set2 = self._make_one() + assert row_set1 == row_set2 - row_set1.add_row_key(row_key1) - row_set2.add_row_key(row_key2) - row_set1.add_row_range(row_range1) - row_set2.add_row_range(row_range2) - self.assertEqual(row_set1, row_set2) +def test_row_set__eq__type_differ(): + from google.cloud.bigtable.row_set import RowSet - def test__eq__type_differ(self): - row_set1 = self._make_one() - row_set2 = object() - self.assertNotEqual(row_set1, row_set2) + row_set1 = RowSet() + row_set2 = object() + assert not (row_set1 == row_set2) - def test__eq__len_row_keys_differ(self): - row_key1 = b"row_key1" - row_key2 = b"row_key1" - row_set1 = self._make_one() - row_set2 = self._make_one() - - row_set1.add_row_key(row_key1) - row_set1.add_row_key(row_key2) - row_set2.add_row_key(row_key2) - - self.assertNotEqual(row_set1, row_set2) - - def test__eq__len_row_ranges_differ(self): - row_range1 = RowRange(b"row_key4", b"row_key9") - row_range2 = RowRange(b"row_key4", b"row_key9") - - row_set1 = self._make_one() - row_set2 = self._make_one() - - row_set1.add_row_range(row_range1) - row_set1.add_row_range(row_range2) - row_set2.add_row_range(row_range2) - - self.assertNotEqual(row_set1, row_set2) - - def test__eq__row_keys_differ(self): - row_set1 = self._make_one() - row_set2 = self._make_one() - - row_set1.add_row_key(b"row_key1") - row_set1.add_row_key(b"row_key2") - row_set1.add_row_key(b"row_key3") - row_set2.add_row_key(b"row_key1") - row_set2.add_row_key(b"row_key2") - row_set2.add_row_key(b"row_key4") +def test_row_set__eq__len_row_keys_differ(): + from google.cloud.bigtable.row_set import RowSet - self.assertNotEqual(row_set1, row_set2) - - def test__eq__row_ranges_differ(self): - row_range1 = RowRange(b"row_key4", b"row_key9") - row_range2 = RowRange(b"row_key14", b"row_key19") - row_range3 = RowRange(b"row_key24", b"row_key29") + row_key1 = b"row_key1" + row_key2 = b"row_key1" - row_set1 = self._make_one() - row_set2 = self._make_one() + row_set1 = RowSet() + row_set2 = RowSet() - row_set1.add_row_range(row_range1) - row_set1.add_row_range(row_range2) - row_set1.add_row_range(row_range3) - row_set2.add_row_range(row_range1) - row_set2.add_row_range(row_range2) - - self.assertNotEqual(row_set1, row_set2) - - def 
test__ne__(self): - row_key1 = b"row_key1" - row_key2 = b"row_key1" - row_range1 = RowRange(b"row_key4", b"row_key9") - row_range2 = RowRange(b"row_key5", b"row_key9") - - row_set1 = self._make_one() - row_set2 = self._make_one() - - row_set1.add_row_key(row_key1) - row_set2.add_row_key(row_key2) - row_set1.add_row_range(row_range1) - row_set2.add_row_range(row_range2) - - self.assertNotEqual(row_set1, row_set2) - - def test__ne__same_value(self): - row_key1 = b"row_key1" - row_key2 = b"row_key1" - row_range1 = RowRange(b"row_key4", b"row_key9") - row_range2 = RowRange(b"row_key4", b"row_key9") - - row_set1 = self._make_one() - row_set2 = self._make_one() - - row_set1.add_row_key(row_key1) - row_set2.add_row_key(row_key2) - row_set1.add_row_range(row_range1) - row_set2.add_row_range(row_range2) - - comparison_val = row_set1 != row_set2 - self.assertFalse(comparison_val) - - def test_add_row_key(self): - row_set = self._make_one() - row_set.add_row_key("row_key1") - row_set.add_row_key("row_key2") - self.assertEqual(["row_key1", "row_key2"], row_set.row_keys) - - def test_add_row_range(self): - row_set = self._make_one() - row_range1 = RowRange(b"row_key1", b"row_key9") - row_range2 = RowRange(b"row_key21", b"row_key29") - row_set.add_row_range(row_range1) - row_set.add_row_range(row_range2) - expected = [row_range1, row_range2] - self.assertEqual(expected, row_set.row_ranges) - - def test_add_row_range_from_keys(self): - row_set = self._make_one() - row_set.add_row_range_from_keys( - start_key=b"row_key1", - end_key=b"row_key9", - start_inclusive=False, - end_inclusive=True, - ) - self.assertEqual(row_set.row_ranges[0].end_key, b"row_key9") - - def test_add_row_range_with_prefix(self): - row_set = self._make_one() - row_set.add_row_range_with_prefix("row") - self.assertEqual(row_set.row_ranges[0].end_key, b"rox") - - def test__update_message_request(self): - row_set = self._make_one() - table_name = "table_name" - row_set.add_row_key("row_key1") - row_range1 = RowRange(b"row_key21", b"row_key29") - row_set.add_row_range(row_range1) - - request = _ReadRowsRequestPB(table_name=table_name) - row_set._update_message_request(request) - - expected_request = _ReadRowsRequestPB(table_name=table_name) - expected_request.rows.row_keys.append(_to_bytes("row_key1")) - - expected_request.rows.row_ranges.append(row_range1.get_range_kwargs()) - - self.assertEqual(request, expected_request) - - -class TestRowRange(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.row_set import RowRange - - return RowRange - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_constructor(self): - start_key = "row_key1" - end_key = "row_key9" - row_range = self._make_one(start_key, end_key) - self.assertEqual(start_key, row_range.start_key) - self.assertEqual(end_key, row_range.end_key) - self.assertTrue(row_range.start_inclusive) - self.assertFalse(row_range.end_inclusive) - - def test___hash__set_equality(self): - row_range1 = self._make_one("row_key1", "row_key9") - row_range2 = self._make_one("row_key1", "row_key9") - set_one = {row_range1, row_range2} - set_two = {row_range1, row_range2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - row_range1 = self._make_one("row_key1", "row_key9") - row_range2 = self._make_one("row_key1", "row_key19") - set_one = {row_range1} - set_two = {row_range2} - self.assertNotEqual(set_one, set_two) - - def test__eq__(self): - start_key = b"row_key1" - end_key = 
b"row_key9" - row_range1 = self._make_one(start_key, end_key, True, False) - row_range2 = self._make_one(start_key, end_key, True, False) - self.assertEqual(row_range1, row_range2) - - def test___eq__type_differ(self): - start_key = b"row_key1" - end_key = b"row_key9" - row_range1 = self._make_one(start_key, end_key, True, False) - row_range2 = object() - self.assertNotEqual(row_range1, row_range2) - - def test__ne__(self): - start_key = b"row_key1" - end_key = b"row_key9" - row_range1 = self._make_one(start_key, end_key, True, False) - row_range2 = self._make_one(start_key, end_key, False, True) - self.assertNotEqual(row_range1, row_range2) - - def test__ne__same_value(self): - start_key = b"row_key1" - end_key = b"row_key9" - row_range1 = self._make_one(start_key, end_key, True, False) - row_range2 = self._make_one(start_key, end_key, True, False) - comparison_val = row_range1 != row_range2 - self.assertFalse(comparison_val) - - def test_get_range_kwargs_closed_open(self): - start_key = b"row_key1" - end_key = b"row_key9" - expected_result = {"start_key_closed": start_key, "end_key_open": end_key} - row_range = self._make_one(start_key, end_key) - actual_result = row_range.get_range_kwargs() - self.assertEqual(expected_result, actual_result) - - def test_get_range_kwargs_open_closed(self): - start_key = b"row_key1" - end_key = b"row_key9" - expected_result = {"start_key_open": start_key, "end_key_closed": end_key} - row_range = self._make_one(start_key, end_key, False, True) - actual_result = row_range.get_range_kwargs() - self.assertEqual(expected_result, actual_result) + row_set1.add_row_key(row_key1) + row_set1.add_row_key(row_key2) + row_set2.add_row_key(row_key2) + + assert not (row_set1 == row_set2) + + +def test_row_set__eq__len_row_ranges_differ(): + from google.cloud.bigtable.row_set import RowRange + from google.cloud.bigtable.row_set import RowSet + + row_range1 = RowRange(b"row_key4", b"row_key9") + row_range2 = RowRange(b"row_key4", b"row_key9") + + row_set1 = RowSet() + row_set2 = RowSet() + + row_set1.add_row_range(row_range1) + row_set1.add_row_range(row_range2) + row_set2.add_row_range(row_range2) + + assert not (row_set1 == row_set2) + + +def test_row_set__eq__row_keys_differ(): + from google.cloud.bigtable.row_set import RowSet + + row_set1 = RowSet() + row_set2 = RowSet() + + row_set1.add_row_key(b"row_key1") + row_set1.add_row_key(b"row_key2") + row_set1.add_row_key(b"row_key3") + row_set2.add_row_key(b"row_key1") + row_set2.add_row_key(b"row_key2") + row_set2.add_row_key(b"row_key4") + + assert not (row_set1 == row_set2) + + +def test_row_set__eq__row_ranges_differ(): + from google.cloud.bigtable.row_set import RowRange + from google.cloud.bigtable.row_set import RowSet + + row_range1 = RowRange(b"row_key4", b"row_key9") + row_range2 = RowRange(b"row_key14", b"row_key19") + row_range3 = RowRange(b"row_key24", b"row_key29") + + row_set1 = RowSet() + row_set2 = RowSet() + + row_set1.add_row_range(row_range1) + row_set1.add_row_range(row_range2) + row_set1.add_row_range(row_range3) + row_set2.add_row_range(row_range1) + row_set2.add_row_range(row_range2) + + assert not (row_set1 == row_set2) + + +def test_row_set__ne__(): + from google.cloud.bigtable.row_set import RowRange + from google.cloud.bigtable.row_set import RowSet + + row_key1 = b"row_key1" + row_key2 = b"row_key1" + row_range1 = RowRange(b"row_key4", b"row_key9") + row_range2 = RowRange(b"row_key5", b"row_key9") + + row_set1 = RowSet() + row_set2 = RowSet() + + row_set1.add_row_key(row_key1) + 
row_set2.add_row_key(row_key2)
+    row_set1.add_row_range(row_range1)
+    row_set2.add_row_range(row_range2)
+
+    assert row_set1 != row_set2
+
+
+def test_row_set__ne__same_value():
+    from google.cloud.bigtable.row_set import RowRange
+    from google.cloud.bigtable.row_set import RowSet
+
+    row_key1 = b"row_key1"
+    row_key2 = b"row_key1"
+    row_range1 = RowRange(b"row_key4", b"row_key9")
+    row_range2 = RowRange(b"row_key4", b"row_key9")
+
+    row_set1 = RowSet()
+    row_set2 = RowSet()
+
+    row_set1.add_row_key(row_key1)
+    row_set2.add_row_key(row_key2)
+    row_set1.add_row_range(row_range1)
+    row_set2.add_row_range(row_range2)
+
+    assert not (row_set1 != row_set2)
+
+
+def test_row_set_add_row_key():
+    from google.cloud.bigtable.row_set import RowSet
+
+    row_set = RowSet()
+    row_set.add_row_key("row_key1")
+    row_set.add_row_key("row_key2")
+    assert ["row_key1", "row_key2"] == row_set.row_keys
+
+
+def test_row_set_add_row_range():
+    from google.cloud.bigtable.row_set import RowRange
+    from google.cloud.bigtable.row_set import RowSet
+
+    row_set = RowSet()
+    row_range1 = RowRange(b"row_key1", b"row_key9")
+    row_range2 = RowRange(b"row_key21", b"row_key29")
+    row_set.add_row_range(row_range1)
+    row_set.add_row_range(row_range2)
+    expected = [row_range1, row_range2]
+    assert expected == row_set.row_ranges
+
+
+def test_row_set_add_row_range_from_keys():
+    from google.cloud.bigtable.row_set import RowSet
+
+    row_set = RowSet()
+    row_set.add_row_range_from_keys(
+        start_key=b"row_key1",
+        end_key=b"row_key9",
+        start_inclusive=False,
+        end_inclusive=True,
+    )
+    assert row_set.row_ranges[0].end_key == b"row_key9"
+
+
+def test_row_set_add_row_range_with_prefix():
+    from google.cloud.bigtable.row_set import RowSet
+
+    row_set = RowSet()
+    row_set.add_row_range_with_prefix("row")
+    assert row_set.row_ranges[0].end_key == b"rox"
+
+
+def test_row_set__update_message_request():
+    from google.cloud._helpers import _to_bytes
+    from google.cloud.bigtable.row_set import RowRange
+    from google.cloud.bigtable.row_set import RowSet
+
+    row_set = RowSet()
+    table_name = "table_name"
+    row_set.add_row_key("row_key1")
+    row_range1 = RowRange(b"row_key21", b"row_key29")
+    row_set.add_row_range(row_range1)
+
+    request = _ReadRowsRequestPB(table_name=table_name)
+    row_set._update_message_request(request)
+
+    expected_request = _ReadRowsRequestPB(table_name=table_name)
+    expected_request.rows.row_keys.append(_to_bytes("row_key1"))
+
+    expected_request.rows.row_ranges.append(row_range1.get_range_kwargs())
+
+    assert request == expected_request
+
+
+def test_row_range_constructor():
+    from google.cloud.bigtable.row_set import RowRange
+
+    start_key = "row_key1"
+    end_key = "row_key9"
+    row_range = RowRange(start_key, end_key)
+    assert start_key == row_range.start_key
+    assert end_key == row_range.end_key
+    assert row_range.start_inclusive
+    assert not row_range.end_inclusive
+
+
+def test_row_range___hash__set_equality():
+    from google.cloud.bigtable.row_set import RowRange
+
+    row_range1 = RowRange("row_key1", "row_key9")
+    row_range2 = RowRange("row_key1", "row_key9")
+    set_one = {row_range1, row_range2}
+    set_two = {row_range1, row_range2}
+    assert set_one == set_two
+
+
+def test_row_range___hash__not_equals():
+    from google.cloud.bigtable.row_set import RowRange
+
+    row_range1 = RowRange("row_key1", "row_key9")
+    row_range2 = RowRange("row_key1", "row_key19")
+    set_one = {row_range1}
+    set_two = {row_range2}
+    assert set_one != set_two
+
+
+def test_row_range__eq__():
+    from google.cloud.bigtable.row_set import RowRange
+
+ start_key = b"row_key1" + end_key = b"row_key9" + row_range1 = RowRange(start_key, end_key, True, False) + row_range2 = RowRange(start_key, end_key, True, False) + assert row_range1 == row_range2 + + +def test_row_range___eq__type_differ(): + from google.cloud.bigtable.row_set import RowRange + + start_key = b"row_key1" + end_key = b"row_key9" + row_range1 = RowRange(start_key, end_key, True, False) + row_range2 = object() + assert row_range1 != row_range2 + + +def test_row_range__ne__(): + from google.cloud.bigtable.row_set import RowRange + + start_key = b"row_key1" + end_key = b"row_key9" + row_range1 = RowRange(start_key, end_key, True, False) + row_range2 = RowRange(start_key, end_key, False, True) + assert row_range1 != row_range2 + + +def test_row_range__ne__same_value(): + from google.cloud.bigtable.row_set import RowRange + + start_key = b"row_key1" + end_key = b"row_key9" + row_range1 = RowRange(start_key, end_key, True, False) + row_range2 = RowRange(start_key, end_key, True, False) + assert not (row_range1 != row_range2) + + +def test_row_range_get_range_kwargs_closed_open(): + from google.cloud.bigtable.row_set import RowRange + + start_key = b"row_key1" + end_key = b"row_key9" + expected_result = {"start_key_closed": start_key, "end_key_open": end_key} + row_range = RowRange(start_key, end_key) + actual_result = row_range.get_range_kwargs() + assert expected_result == actual_result + + +def test_row_range_get_range_kwargs_open_closed(): + from google.cloud.bigtable.row_set import RowRange + + start_key = b"row_key1" + end_key = b"row_key9" + expected_result = {"start_key_open": start_key, "end_key_closed": end_key} + row_range = RowRange(start_key, end_key, False, True) + actual_result = row_range.get_range_kwargs() + assert expected_result == actual_result def _ReadRowsRequestPB(*args, **kw): From 3192c37e643ea2abe6bd9b0633e13c62512ef4db Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 21 Oct 2021 14:55:41 -0400 Subject: [PATCH 15/16] tests: refactor 'table' unit tests w/ pytest idioms --- tests/unit/test_table.py | 3717 ++++++++++++++++++-------------------- 1 file changed, 1776 insertions(+), 1941 deletions(-) diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py index bb6cca6a7..eacde3c3e 100644 --- a/tests/unit/test_table.py +++ b/tests/unit/test_table.py @@ -13,2154 +13,1996 @@ # limitations under the License. 
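One idiom worth calling out in the refactored table tests below: the retry-related constants are derived via `StatusCode.<NAME>.value[0]`. Each `grpc.StatusCode` member's value is a (numeric code, description) tuple, so indexing with `[0]` extracts the integer canonical code, which is what per-entry statuses in `MutateRows` responses carry. A small standalone sketch, assuming only the standard gRPC code numbers:

from grpc import StatusCode

# StatusCode members wrap (numeric-code, description) pairs;
# index [0] to get the integer used in google.rpc.Status.code.
assert StatusCode.OK.value[0] == 0
assert StatusCode.CANCELLED.value[0] == 1
assert StatusCode.DEADLINE_EXCEEDED.value[0] == 4
assert StatusCode.ABORTED.value[0] == 10
assert StatusCode.UNAVAILABLE.value[0] == 14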
-import unittest import warnings import mock +import pytest +from grpc import StatusCode -from ._testing import _make_credentials from google.api_core.exceptions import DeadlineExceeded +from ._testing import _make_credentials +PROJECT_ID = "project-id" +INSTANCE_ID = "instance-id" +INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID +CLUSTER_ID = "cluster-id" +CLUSTER_NAME = INSTANCE_NAME + "/clusters/" + CLUSTER_ID +TABLE_ID = "table-id" +TABLE_NAME = INSTANCE_NAME + "/tables/" + TABLE_ID +BACKUP_ID = "backup-id" +BACKUP_NAME = CLUSTER_NAME + "/backups/" + BACKUP_ID +ROW_KEY = b"row-key" +ROW_KEY_1 = b"row-key-1" +ROW_KEY_2 = b"row-key-2" +ROW_KEY_3 = b"row-key-3" +FAMILY_NAME = "family" +QUALIFIER = b"qualifier" +TIMESTAMP_MICROS = 100 +VALUE = b"value" + +# RPC Status Codes +SUCCESS = StatusCode.OK.value[0] +RETRYABLE_1 = StatusCode.DEADLINE_EXCEEDED.value[0] +RETRYABLE_2 = StatusCode.ABORTED.value[0] +RETRYABLE_3 = StatusCode.UNAVAILABLE.value[0] +RETRYABLES = (RETRYABLE_1, RETRYABLE_2, RETRYABLE_3) +NON_RETRYABLE = StatusCode.CANCELLED.value[0] + + +@mock.patch("google.cloud.bigtable.table._MAX_BULK_MUTATIONS", new=3) +def test__compile_mutation_entries_w_too_many_mutations(): + from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable.table import TooManyMutationsError + from google.cloud.bigtable.table import _compile_mutation_entries + + table = mock.Mock(name="table", spec=["name"]) + table.name = "table" + rows = [ + DirectRow(row_key=b"row_key", table=table), + DirectRow(row_key=b"row_key_2", table=table), + ] + rows[0].set_cell("cf1", b"c1", 1) + rows[0].set_cell("cf1", b"c1", 2) + rows[1].set_cell("cf1", b"c1", 3) + rows[1].set_cell("cf1", b"c1", 4) + + with pytest.raises(TooManyMutationsError): + _compile_mutation_entries("table", rows) -class Test__compile_mutation_entries(unittest.TestCase): - def _call_fut(self, table_name, rows): - from google.cloud.bigtable.table import _compile_mutation_entries - return _compile_mutation_entries(table_name, rows) +def test__compile_mutation_entries_normal(): + from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable.table import _compile_mutation_entries + from google.cloud.bigtable_v2.types import MutateRowsRequest + from google.cloud.bigtable_v2.types import data - @mock.patch("google.cloud.bigtable.table._MAX_BULK_MUTATIONS", new=3) - def test_w_too_many_mutations(self): - from google.cloud.bigtable.row import DirectRow - from google.cloud.bigtable.table import TooManyMutationsError + table = mock.Mock(spec=["name"]) + table.name = "table" + rows = [ + DirectRow(row_key=b"row_key", table=table), + DirectRow(row_key=b"row_key_2"), + ] + rows[0].set_cell("cf1", b"c1", b"1") + rows[1].set_cell("cf1", b"c1", b"2") + + result = _compile_mutation_entries("table", rows) + + entry_1 = MutateRowsRequest.Entry() + entry_1.row_key = b"row_key" + mutations_1 = data.Mutation() + mutations_1.set_cell.family_name = "cf1" + mutations_1.set_cell.column_qualifier = b"c1" + mutations_1.set_cell.timestamp_micros = -1 + mutations_1.set_cell.value = b"1" + entry_1.mutations.append(mutations_1) + + entry_2 = MutateRowsRequest.Entry() + entry_2.row_key = b"row_key_2" + mutations_2 = data.Mutation() + mutations_2.set_cell.family_name = "cf1" + mutations_2.set_cell.column_qualifier = b"c1" + mutations_2.set_cell.timestamp_micros = -1 + mutations_2.set_cell.value = b"2" + entry_2.mutations.append(mutations_2) + assert result == [entry_1, entry_2] + + +def 
test__check_row_table_name_w_wrong_table_name(): + from google.cloud.bigtable.table import _check_row_table_name + from google.cloud.bigtable.table import TableMismatchError + from google.cloud.bigtable.row import DirectRow + + table = mock.Mock(name="table", spec=["name"]) + table.name = "table" + row = DirectRow(row_key=b"row_key", table=table) + + with pytest.raises(TableMismatchError): + _check_row_table_name("other_table", row) + + +def test__check_row_table_name_w_right_table_name(): + from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable.table import _check_row_table_name + + table = mock.Mock(name="table", spec=["name"]) + table.name = "table" + row = DirectRow(row_key=b"row_key", table=table) - table = mock.Mock(name="table", spec=["name"]) - table.name = "table" - rows = [ - DirectRow(row_key=b"row_key", table=table), - DirectRow(row_key=b"row_key_2", table=table), - ] - rows[0].set_cell("cf1", b"c1", 1) - rows[0].set_cell("cf1", b"c1", 2) - rows[1].set_cell("cf1", b"c1", 3) - rows[1].set_cell("cf1", b"c1", 4) - - with self.assertRaises(TooManyMutationsError): - self._call_fut("table", rows) - - def test_normal(self): - from google.cloud.bigtable.row import DirectRow - from google.cloud.bigtable_v2.types import MutateRowsRequest - from google.cloud.bigtable_v2.types import data - - table = mock.Mock(spec=["name"]) - table.name = "table" - rows = [ - DirectRow(row_key=b"row_key", table=table), - DirectRow(row_key=b"row_key_2"), - ] - rows[0].set_cell("cf1", b"c1", b"1") - rows[1].set_cell("cf1", b"c1", b"2") - - result = self._call_fut("table", rows) - - entry_1 = MutateRowsRequest.Entry() - entry_1.row_key = b"row_key" - mutations_1 = data.Mutation() - mutations_1.set_cell.family_name = "cf1" - mutations_1.set_cell.column_qualifier = b"c1" - mutations_1.set_cell.timestamp_micros = -1 - mutations_1.set_cell.value = b"1" - entry_1.mutations.append(mutations_1) + assert not _check_row_table_name("table", row) - entry_2 = MutateRowsRequest.Entry() - entry_2.row_key = b"row_key_2" - mutations_2 = data.Mutation() - mutations_2.set_cell.family_name = "cf1" - mutations_2.set_cell.column_qualifier = b"c1" - mutations_2.set_cell.timestamp_micros = -1 - mutations_2.set_cell.value = b"2" - entry_2.mutations.append(mutations_2) - self.assertEqual(result, [entry_1, entry_2]) - - -class Test__check_row_table_name(unittest.TestCase): - def _call_fut(self, table_name, row): - from google.cloud.bigtable.table import _check_row_table_name - - return _check_row_table_name(table_name, row) - - def test_wrong_table_name(self): - from google.cloud.bigtable.table import TableMismatchError - from google.cloud.bigtable.row import DirectRow - - table = mock.Mock(name="table", spec=["name"]) - table.name = "table" - row = DirectRow(row_key=b"row_key", table=table) - with self.assertRaises(TableMismatchError): - self._call_fut("other_table", row) - - def test_right_table_name(self): - from google.cloud.bigtable.row import DirectRow - - table = mock.Mock(name="table", spec=["name"]) - table.name = "table" - row = DirectRow(row_key=b"row_key", table=table) - result = self._call_fut("table", row) - self.assertFalse(result) - - -class Test__check_row_type(unittest.TestCase): - def _call_fut(self, row): - from google.cloud.bigtable.table import _check_row_type - - return _check_row_type(row) - - def test_test_wrong_row_type(self): - from google.cloud.bigtable.row import ConditionalRow - - row = ConditionalRow(row_key=b"row_key", table="table", filter_=None) - with 
self.assertRaises(TypeError): - self._call_fut(row) - - def test_right_row_type(self): - from google.cloud.bigtable.row import DirectRow - - row = DirectRow(row_key=b"row_key", table="table") - result = self._call_fut(row) - self.assertFalse(result) - - -class TestTable(unittest.TestCase): - - PROJECT_ID = "project-id" - INSTANCE_ID = "instance-id" - INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID - CLUSTER_ID = "cluster-id" - CLUSTER_NAME = INSTANCE_NAME + "/clusters/" + CLUSTER_ID - TABLE_ID = "table-id" - TABLE_NAME = INSTANCE_NAME + "/tables/" + TABLE_ID - BACKUP_ID = "backup-id" - BACKUP_NAME = CLUSTER_NAME + "/backups/" + BACKUP_ID - ROW_KEY = b"row-key" - ROW_KEY_1 = b"row-key-1" - ROW_KEY_2 = b"row-key-2" - ROW_KEY_3 = b"row-key-3" - FAMILY_NAME = "family" - QUALIFIER = b"qualifier" - TIMESTAMP_MICROS = 100 - VALUE = b"value" - _json_tests = None - - @staticmethod - def _get_target_class(): - from google.cloud.bigtable.table import Table - - return Table - - def _make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client - - return Client - - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) - - def test_constructor_defaults(self): - instance = mock.Mock(spec=[]) - table = self._make_one(self.TABLE_ID, instance) - - self.assertEqual(table.table_id, self.TABLE_ID) - self.assertIs(table._instance, instance) - self.assertIsNone(table.mutation_timeout) - self.assertIsNone(table._app_profile_id) +def test__check_row_type_w_wrong_row_type(): + from google.cloud.bigtable.row import ConditionalRow + from google.cloud.bigtable.table import _check_row_type - def test_constructor_explicit(self): - instance = mock.Mock(spec=[]) - mutation_timeout = 123 - app_profile_id = "profile-123" + row = ConditionalRow(row_key=b"row_key", table="table", filter_=None) + with pytest.raises(TypeError): + _check_row_type(row) - table = self._make_one( - self.TABLE_ID, - instance, - mutation_timeout=mutation_timeout, - app_profile_id=app_profile_id, - ) - self.assertEqual(table.table_id, self.TABLE_ID) - self.assertIs(table._instance, instance) - self.assertEqual(table.mutation_timeout, mutation_timeout) - self.assertEqual(table._app_profile_id, app_profile_id) - - def test_name(self): - table_data_client = mock.Mock(spec=["table_path"]) - client = mock.Mock( - project=self.PROJECT_ID, - table_data_client=table_data_client, - spec=["project", "table_data_client"], - ) - instance = mock.Mock( - _client=client, - instance_id=self.INSTANCE_ID, - spec=["_client", "instance_id"], - ) +def test__check_row_type_w_right_row_type(): + from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable.table import _check_row_type - table = self._make_one(self.TABLE_ID, instance) + row = DirectRow(row_key=b"row_key", table="table") + assert not _check_row_type(row) - self.assertEqual(table.name, table_data_client.table_path.return_value) - def _row_methods_helper(self): - client = self._make_client( - project="project-id", credentials=_make_credentials(), admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - row_key = b"row_key" - return table, row_key +def _make_client(*args, **kwargs): + from google.cloud.bigtable.client import Client - def test_row_factory_direct(self): - from google.cloud.bigtable.row import DirectRow + return Client(*args, 
**kwargs) - table, row_key = self._row_methods_helper() - with warnings.catch_warnings(record=True) as warned: - row = table.row(row_key) - self.assertIsInstance(row, DirectRow) - self.assertEqual(row._row_key, row_key) - self.assertEqual(row._table, table) +def _make_table(*args, **kwargs): + from google.cloud.bigtable.table import Table - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, PendingDeprecationWarning) + return Table(*args, **kwargs) - def test_row_factory_conditional(self): - from google.cloud.bigtable.row import ConditionalRow - table, row_key = self._row_methods_helper() - filter_ = object() +def test_table_constructor_defaults(): + instance = mock.Mock(spec=[]) - with warnings.catch_warnings(record=True) as warned: - row = table.row(row_key, filter_=filter_) + table = _make_table(TABLE_ID, instance) + + assert table.table_id == TABLE_ID + assert table._instance is instance + assert table.mutation_timeout is None + assert table._app_profile_id is None + + +def test_table_constructor_explicit(): + instance = mock.Mock(spec=[]) + mutation_timeout = 123 + app_profile_id = "profile-123" + + table = _make_table( + TABLE_ID, + instance, + mutation_timeout=mutation_timeout, + app_profile_id=app_profile_id, + ) + + assert table.table_id == TABLE_ID + assert table._instance is instance + assert table.mutation_timeout == mutation_timeout + assert table._app_profile_id == app_profile_id + + +def test_table_name(): + table_data_client = mock.Mock(spec=["table_path"]) + client = mock.Mock( + project=PROJECT_ID, + table_data_client=table_data_client, + spec=["project", "table_data_client"], + ) + instance = mock.Mock( + _client=client, instance_id=INSTANCE_ID, spec=["_client", "instance_id"], + ) + + table = _make_table(TABLE_ID, instance) + + assert table.name == table_data_client.table_path.return_value - self.assertIsInstance(row, ConditionalRow) - self.assertEqual(row._row_key, row_key) - self.assertEqual(row._table, table) - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, PendingDeprecationWarning) +def _table_row_methods_helper(): + client = _make_client( + project="project-id", credentials=_make_credentials(), admin=True + ) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + row_key = b"row_key" + return table, row_key + + +def test_table_row_factory_direct(): + from google.cloud.bigtable.row import DirectRow + + table, row_key = _table_row_methods_helper() + with warnings.catch_warnings(record=True) as warned: + row = table.row(row_key) + + assert isinstance(row, DirectRow) + assert row._row_key == row_key + assert row._table == table + + assert len(warned) == 1 + assert warned[0].category is PendingDeprecationWarning + + +def test_table_row_factory_conditional(): + from google.cloud.bigtable.row import ConditionalRow + + table, row_key = _table_row_methods_helper() + filter_ = object() + + with warnings.catch_warnings(record=True) as warned: + row = table.row(row_key, filter_=filter_) + + assert isinstance(row, ConditionalRow) + assert row._row_key == row_key + assert row._table == table - def test_row_factory_append(self): - from google.cloud.bigtable.row import AppendRow + assert len(warned) == 1 + assert warned[0].category is PendingDeprecationWarning - table, row_key = self._row_methods_helper() +def test_table_row_factory_append(): + from google.cloud.bigtable.row import AppendRow + + table, row_key = _table_row_methods_helper() + + with warnings.catch_warnings(record=True) as warned: + 
row = table.row(row_key, append=True) + + assert isinstance(row, AppendRow) + assert row._row_key == row_key + assert row._table == table + + assert len(warned) == 1 + assert warned[0].category is PendingDeprecationWarning + + +def test_table_row_factory_failure(): + table, row_key = _table_row_methods_helper() + + with pytest.raises(ValueError): with warnings.catch_warnings(record=True) as warned: - row = table.row(row_key, append=True) + table.row(row_key, filter_=object(), append=True) - self.assertIsInstance(row, AppendRow) - self.assertEqual(row._row_key, row_key) - self.assertEqual(row._table, table) + assert len(warned) == 1 + assert warned[0].category is PendingDeprecationWarning - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, PendingDeprecationWarning) - def test_row_factory_failure(self): - table, row_key = self._row_methods_helper() - with self.assertRaises(ValueError): - with warnings.catch_warnings(record=True) as warned: - table.row(row_key, filter_=object(), append=True) +def test_table_direct_row(): + from google.cloud.bigtable.row import DirectRow - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, PendingDeprecationWarning) + table, row_key = _table_row_methods_helper() + row = table.direct_row(row_key) - def test_direct_row(self): - from google.cloud.bigtable.row import DirectRow + assert isinstance(row, DirectRow) + assert row._row_key == row_key + assert row._table == table - table, row_key = self._row_methods_helper() - row = table.direct_row(row_key) - self.assertIsInstance(row, DirectRow) - self.assertEqual(row._row_key, row_key) - self.assertEqual(row._table, table) +def test_table_conditional_row(): + from google.cloud.bigtable.row import ConditionalRow - def test_conditional_row(self): - from google.cloud.bigtable.row import ConditionalRow + table, row_key = _table_row_methods_helper() + filter_ = object() + row = table.conditional_row(row_key, filter_=filter_) - table, row_key = self._row_methods_helper() - filter_ = object() - row = table.conditional_row(row_key, filter_=filter_) + assert isinstance(row, ConditionalRow) + assert row._row_key == row_key + assert row._table == table - self.assertIsInstance(row, ConditionalRow) - self.assertEqual(row._row_key, row_key) - self.assertEqual(row._table, table) - def test_append_row(self): - from google.cloud.bigtable.row import AppendRow +def test_table_append_row(): + from google.cloud.bigtable.row import AppendRow - table, row_key = self._row_methods_helper() - row = table.append_row(row_key) + table, row_key = _table_row_methods_helper() + row = table.append_row(row_key) - self.assertIsInstance(row, AppendRow) - self.assertEqual(row._row_key, row_key) - self.assertEqual(row._table, table) + assert isinstance(row, AppendRow) + assert row._row_key == row_key + assert row._table == table - def test___eq__(self): - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table1 = self._make_one(self.TABLE_ID, instance) - table2 = self._make_one(self.TABLE_ID, instance) - self.assertEqual(table1, table2) - - def test___eq__type_differ(self): - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table1 = self._make_one(self.TABLE_ID, instance) - table2 = object() - self.assertNotEqual(table1, table2) - - def 
test___ne__same_value(self): - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table1 = self._make_one(self.TABLE_ID, instance) - table2 = self._make_one(self.TABLE_ID, instance) - comparison_val = table1 != table2 - self.assertFalse(comparison_val) - - def test___ne__(self): - table1 = self._make_one("table_id1", None) - table2 = self._make_one("table_id2", None) - self.assertNotEqual(table1, table2) - - def _create_test_helper(self, split_keys=[], column_families={}): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.cloud.bigtable_admin_v2.types import table as table_pb2 - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin as table_admin_messages_v2_pb2, - ) - from google.cloud.bigtable.column_family import ColumnFamily - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) +def test_table___eq__(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table1 = _make_table(TABLE_ID, instance) + table2 = _make_table(TABLE_ID, instance) + assert table1 == table2 + + +def test_table___eq__type_differ(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table1 = _make_table(TABLE_ID, instance) + table2 = object() + assert not (table1 == table2) + + +def test_table___ne__same_value(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table1 = _make_table(TABLE_ID, instance) + table2 = _make_table(TABLE_ID, instance) + assert not (table1 != table2) + + +def test_table___ne__(): + table1 = _make_table("table_id1", None) + table2 = _make_table("table_id2", None) + assert table1 != table2 + + +def _make_table_api(): + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + client as bigtable_table_admin, + ) - # Patch API calls - client._table_admin_client = table_api + return mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - # Perform the method and check the result. 
- table.create(column_families=column_families, initial_split_keys=split_keys) - families = { - id: ColumnFamily(id, self, rule).to_pb() - for (id, rule) in column_families.items() +def _create_table_helper(split_keys=[], column_families={}): + from google.cloud.bigtable_admin_v2.types import table as table_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_admin_messages_v2_pb2, + ) + from google.cloud.bigtable.column_family import ColumnFamily + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + table_api = client._table_admin_client = _make_table_api() + + table.create(column_families=column_families, initial_split_keys=split_keys) + + families = { + id: ColumnFamily(id, table, rule).to_pb() + for (id, rule) in column_families.items() + } + + split = table_admin_messages_v2_pb2.CreateTableRequest.Split + splits = [split(key=split_key) for split_key in split_keys] + + table_api.create_table.assert_called_once_with( + request={ + "parent": INSTANCE_NAME, + "table": table_pb2.Table(column_families=families), + "table_id": TABLE_ID, + "initial_splits": splits, } + ) - split = table_admin_messages_v2_pb2.CreateTableRequest.Split - splits = [split(key=split_key) for split_key in split_keys] - table_api.create_table.assert_called_once_with( - request={ - "parent": self.INSTANCE_NAME, - "table": table_pb2.Table(column_families=families), - "table_id": self.TABLE_ID, - "initial_splits": splits, - } - ) +def test_table_create(): + _create_table_helper() - def test_create(self): - self._create_test_helper() - def test_create_with_families(self): - from google.cloud.bigtable.column_family import MaxVersionsGCRule +def test_table_create_with_families(): + from google.cloud.bigtable.column_family import MaxVersionsGCRule - families = {"family": MaxVersionsGCRule(5)} - self._create_test_helper(column_families=families) + families = {"family": MaxVersionsGCRule(5)} + _create_table_helper(column_families=families) - def test_create_with_split_keys(self): - self._create_test_helper(split_keys=[b"split1", b"split2", b"split3"]) - def test_exists(self): - from google.cloud.bigtable_admin_v2.types import ListTablesResponse - from google.cloud.bigtable_admin_v2.types import Table - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as table_admin_client, - ) - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - client as instance_admin_client, - ) - from google.api_core.exceptions import NotFound - from google.api_core.exceptions import BadRequest +def test_table_create_with_split_keys(): + _create_table_helper(split_keys=[b"split1", b"split2", b"split3"]) - table_api = mock.create_autospec(table_admin_client.BigtableTableAdminClient) - instance_api = mock.create_autospec( - instance_admin_client.BigtableInstanceAdminClient - ) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - # Create response_pb - response_pb = ListTablesResponse(tables=[Table(name=self.TABLE_NAME)]) - - # Patch API calls - client._table_admin_client = table_api - client._instance_admin_client = instance_api - bigtable_table_stub = client._table_admin_client - - bigtable_table_stub.get_table.side_effect = [ - response_pb, - 
NotFound("testing"), - BadRequest("testing"), - ] +def test_table_exists_hit(): + from google.cloud.bigtable_admin_v2.types import ListTablesResponse + from google.cloud.bigtable_admin_v2.types import Table + from google.cloud.bigtable import enums - client._table_admin_client = table_api - client._instance_admin_client = instance_api - bigtable_table_stub = client._table_admin_client - bigtable_table_stub.get_table.side_effect = [ - response_pb, - NotFound("testing"), - BadRequest("testing"), - ] + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = instance.table(TABLE_ID) - # Perform the method and check the result. - table1 = instance.table(self.TABLE_ID) - table2 = instance.table("table-id2") + response_pb = ListTablesResponse(tables=[Table(name=TABLE_NAME)]) + table_api = client._table_admin_client = _make_table_api() + table_api.get_table.return_value = response_pb - result = table1.exists() - self.assertEqual(True, result) + assert table.exists() - result = table2.exists() - self.assertEqual(False, result) + expected_request = { + "name": table.name, + "view": enums.Table.View.NAME_ONLY, + } + table_api.get_table.assert_called_once_with(request=expected_request) - with self.assertRaises(BadRequest): - table2.exists() - def test_delete(self): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) +def test_table_exists_miss(): + from google.api_core.exceptions import NotFound + from google.cloud.bigtable import enums - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = instance.table("nonesuch-table-id2") - # Patch API calls - client._table_admin_client = table_api + table_api = client._table_admin_client = _make_table_api() + table_api.get_table.side_effect = NotFound("testing") - # Create expected_result. - expected_result = None # delete() has no return value. + assert not table.exists() - # Perform the method and check the result. 
- result = table.delete() - self.assertEqual(result, expected_result) + expected_request = { + "name": table.name, + "view": enums.Table.View.NAME_ONLY, + } + table_api.get_table.assert_called_once_with(request=expected_request) - def _list_column_families_helper(self): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) +def test_table_exists_error(): + from google.api_core.exceptions import BadRequest + from google.cloud.bigtable import enums - # Create response_pb - COLUMN_FAMILY_ID = "foo" - column_family = _ColumnFamilyPB() - response_pb = _TablePB(column_families={COLUMN_FAMILY_ID: column_family}) + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) - # Patch the stub used by the API method. - client._table_admin_client = table_api - bigtable_table_stub = client._table_admin_client - bigtable_table_stub.get_table.side_effect = [response_pb] + table_api = client._table_admin_client = _make_table_api() + table_api.get_table.side_effect = BadRequest("testing") - # Create expected_result. - expected_result = {COLUMN_FAMILY_ID: table.column_family(COLUMN_FAMILY_ID)} + table = instance.table(TABLE_ID) - # Perform the method and check the result. - result = table.list_column_families() - self.assertEqual(result, expected_result) + with pytest.raises(BadRequest): + table.exists() - def test_list_column_families(self): - self._list_column_families_helper() + expected_request = { + "name": table.name, + "view": enums.Table.View.NAME_ONLY, + } + table_api.get_table.assert_called_once_with(request=expected_request) - def test_get_cluster_states(self): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.cloud.bigtable.enums import Table as enum_table - from google.cloud.bigtable.table import ClusterState - INITIALIZING = enum_table.ReplicationState.INITIALIZING - PLANNED_MAINTENANCE = enum_table.ReplicationState.PLANNED_MAINTENANCE - READY = enum_table.ReplicationState.READY +def test_table_delete(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - response_pb = _TablePB( - cluster_states={ - "cluster-id1": _ClusterStatePB(INITIALIZING), - "cluster-id2": _ClusterStatePB(PLANNED_MAINTENANCE), - "cluster-id3": _ClusterStatePB(READY), - } - ) + table_api = client._table_admin_client = _make_table_api() - # Patch the stub used by the API method. 
- client._table_admin_client = table_api - bigtable_table_stub = client._table_admin_client + assert table.delete() is None - bigtable_table_stub.get_table.side_effect = [response_pb] + table_api.delete_table.assert_called_once_with(request={"name": table.name}) - # build expected result - expected_result = { - "cluster-id1": ClusterState(INITIALIZING), - "cluster-id2": ClusterState(PLANNED_MAINTENANCE), - "cluster-id3": ClusterState(READY), - } - # Perform the method and check the result. - result = table.get_cluster_states() - self.assertEqual(result, expected_result) +def _table_list_column_families_helper(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) - def test_get_encryption_info(self): - from google.rpc.code_pb2 import Code - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.cloud.bigtable.encryption_info import EncryptionInfo - from google.cloud.bigtable.enums import EncryptionInfo as enum_crypto - from google.cloud.bigtable.error import Status + # Create response_pb + COLUMN_FAMILY_ID = "foo" + column_family = _ColumnFamilyPB() + response_pb = _TablePB(column_families={COLUMN_FAMILY_ID: column_family}) - ENCRYPTION_TYPE_UNSPECIFIED = ( - enum_crypto.EncryptionType.ENCRYPTION_TYPE_UNSPECIFIED - ) - GOOGLE_DEFAULT_ENCRYPTION = enum_crypto.EncryptionType.GOOGLE_DEFAULT_ENCRYPTION - CUSTOMER_MANAGED_ENCRYPTION = ( - enum_crypto.EncryptionType.CUSTOMER_MANAGED_ENCRYPTION - ) + # Patch the stub used by the API method. + table_api = client._table_admin_client = _make_table_api() + table_api.get_table.return_value = response_pb - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - response_pb = _TablePB( - cluster_states={ - "cluster-id1": _ClusterStateEncryptionInfoPB( - encryption_type=ENCRYPTION_TYPE_UNSPECIFIED, - encryption_status=_StatusPB(Code.OK, "Status OK"), - ), - "cluster-id2": _ClusterStateEncryptionInfoPB( - encryption_type=GOOGLE_DEFAULT_ENCRYPTION, - ), - "cluster-id3": _ClusterStateEncryptionInfoPB( - encryption_type=CUSTOMER_MANAGED_ENCRYPTION, - encryption_status=_StatusPB( - Code.UNKNOWN, "Key version is not yet known." - ), - kms_key_version="UNKNOWN", - ), - } - ) + # Create expected_result. + expected_result = {COLUMN_FAMILY_ID: table.column_family(COLUMN_FAMILY_ID)} - # Patch the stub used by the API method. - client._table_admin_client = table_api - bigtable_table_stub = client._table_admin_client + # Perform the method and check the result. 
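+    # Perform the method and check the result.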
+ result = table.list_column_families() - bigtable_table_stub.get_table.side_effect = [response_pb] + assert result == expected_result - # build expected result - expected_result = { - "cluster-id1": ( - EncryptionInfo( - encryption_type=ENCRYPTION_TYPE_UNSPECIFIED, - encryption_status=Status(_StatusPB(Code.OK, "Status OK")), - kms_key_version="", - ), + table_api.get_table.assert_called_once_with(request={"name": table.name}) + + +def test_table_list_column_families(): + _table_list_column_families_helper() + + +def test_table_get_cluster_states(): + from google.cloud.bigtable.enums import Table as enum_table + from google.cloud.bigtable.table import ClusterState + + INITIALIZING = enum_table.ReplicationState.INITIALIZING + PLANNED_MAINTENANCE = enum_table.ReplicationState.PLANNED_MAINTENANCE + READY = enum_table.ReplicationState.READY + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + response_pb = _TablePB( + cluster_states={ + "cluster-id1": _ClusterStatePB(INITIALIZING), + "cluster-id2": _ClusterStatePB(PLANNED_MAINTENANCE), + "cluster-id3": _ClusterStatePB(READY), + } + ) + + # Patch the stub used by the API method. + table_api = client._table_admin_client = _make_table_api() + table_api.get_table.return_value = response_pb + + # build expected result + expected_result = { + "cluster-id1": ClusterState(INITIALIZING), + "cluster-id2": ClusterState(PLANNED_MAINTENANCE), + "cluster-id3": ClusterState(READY), + } + + # Perform the method and check the result. + result = table.get_cluster_states() + + assert result == expected_result + + expected_request = { + "name": table.name, + "view": enum_table.View.REPLICATION_VIEW, + } + table_api.get_table.assert_called_once_with(request=expected_request) + + +def test_table_get_encryption_info(): + from google.rpc.code_pb2 import Code + from google.cloud.bigtable.encryption_info import EncryptionInfo + from google.cloud.bigtable.enums import EncryptionInfo as enum_crypto + from google.cloud.bigtable.enums import Table as enum_table + from google.cloud.bigtable.error import Status + + ENCRYPTION_TYPE_UNSPECIFIED = enum_crypto.EncryptionType.ENCRYPTION_TYPE_UNSPECIFIED + GOOGLE_DEFAULT_ENCRYPTION = enum_crypto.EncryptionType.GOOGLE_DEFAULT_ENCRYPTION + CUSTOMER_MANAGED_ENCRYPTION = enum_crypto.EncryptionType.CUSTOMER_MANAGED_ENCRYPTION + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + response_pb = _TablePB( + cluster_states={ + "cluster-id1": _ClusterStateEncryptionInfoPB( + encryption_type=ENCRYPTION_TYPE_UNSPECIFIED, + encryption_status=_StatusPB(Code.OK, "Status OK"), ), - "cluster-id2": ( - EncryptionInfo( - encryption_type=GOOGLE_DEFAULT_ENCRYPTION, - encryption_status=Status(_StatusPB(0, "")), - kms_key_version="", - ), + "cluster-id2": _ClusterStateEncryptionInfoPB( + encryption_type=GOOGLE_DEFAULT_ENCRYPTION, ), - "cluster-id3": ( - EncryptionInfo( - encryption_type=CUSTOMER_MANAGED_ENCRYPTION, - encryption_status=Status( - _StatusPB(Code.UNKNOWN, "Key version is not yet known.") - ), - kms_key_version="UNKNOWN", + "cluster-id3": _ClusterStateEncryptionInfoPB( + encryption_type=CUSTOMER_MANAGED_ENCRYPTION, + encryption_status=_StatusPB( + Code.UNKNOWN, "Key version is not yet known." 
), + kms_key_version="UNKNOWN", ), } + ) - # Perform the method and check the result. - result = table.get_encryption_info() - self.assertEqual(result, expected_result) + # Patch the stub used by the API method. + table_api = client._table_admin_client = _make_table_api() + table_api.get_table.return_value = response_pb + + # build expected result + expected_result = { + "cluster-id1": ( + EncryptionInfo( + encryption_type=ENCRYPTION_TYPE_UNSPECIFIED, + encryption_status=Status(_StatusPB(Code.OK, "Status OK")), + kms_key_version="", + ), + ), + "cluster-id2": ( + EncryptionInfo( + encryption_type=GOOGLE_DEFAULT_ENCRYPTION, + encryption_status=Status(_StatusPB(0, "")), + kms_key_version="", + ), + ), + "cluster-id3": ( + EncryptionInfo( + encryption_type=CUSTOMER_MANAGED_ENCRYPTION, + encryption_status=Status( + _StatusPB(Code.UNKNOWN, "Key version is not yet known.") + ), + kms_key_version="UNKNOWN", + ), + ), + } - def _read_row_helper(self, chunks, expected_result, app_profile_id=None): + # Perform the method and check the result. + result = table.get_encryption_info() - from google.cloud._testing import _Monkey - from google.cloud.bigtable import table as MUT - from google.cloud.bigtable.row_set import RowSet - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.cloud.bigtable.row_filters import RowSampleFilter + assert result == expected_result + expected_request = { + "name": table.name, + "view": enum_table.View.ENCRYPTION_VIEW, + } + table_api.get_table.assert_called_once_with(request=expected_request) - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance, app_profile_id=app_profile_id) - # Create request_pb - request_pb = object() # Returned by our mock. - mock_created = [] +def _make_data_api(): + from google.cloud.bigtable_v2.services.bigtable import BigtableClient - def mock_create_row_request(table_name, **kwargs): - mock_created.append((table_name, kwargs)) - return request_pb + return mock.create_autospec(BigtableClient) - # Create response_iterator - if chunks is None: - response_iterator = iter(()) # no responses at all - else: - response_pb = _ReadRowsResponsePB(chunks=chunks) - response_iterator = iter([response_pb]) - - # Patch the stub used by the API method. - client._table_data_client = data_api - client._table_admin_client = table_api - client._table_data_client.read_rows.side_effect = [response_iterator] - table._instance._client._table_data_client = client._table_data_client - # Perform the method and check the result. 
- filter_obj = RowSampleFilter(0.33) - result = None - with _Monkey(MUT, _create_row_request=mock_create_row_request): - result = table.read_row(self.ROW_KEY, filter_=filter_obj) - row_set = RowSet() - row_set.add_row_key(self.ROW_KEY) - expected_request = [ - ( - table.name, - { - "end_inclusive": False, - "row_set": row_set, - "app_profile_id": app_profile_id, - "end_key": None, - "limit": None, - "start_key": None, - "filter_": filter_obj, - }, - ) - ] - self.assertEqual(result, expected_result) - self.assertEqual(mock_created, expected_request) - - def test_read_row_miss_no__responses(self): - self._read_row_helper(None, None) - - def test_read_row_miss_no_chunks_in_response(self): - chunks = [] - self._read_row_helper(chunks, None) - - def test_read_row_complete(self): - from google.cloud.bigtable.row_data import Cell - from google.cloud.bigtable.row_data import PartialRowData - - app_profile_id = "app-profile-id" - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunks = [chunk] - expected_result = PartialRowData(row_key=self.ROW_KEY) - family = expected_result._cells.setdefault(self.FAMILY_NAME, {}) - column = family.setdefault(self.QUALIFIER, []) - column.append(Cell.from_pb(chunk)) - self._read_row_helper(chunks, expected_result, app_profile_id) - - def test_read_row_more_than_one_row_returned(self): - app_profile_id = "app-profile-id" - chunk_1 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - )._pb - chunk_2 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_2, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - )._pb - - chunks = [chunk_1, chunk_2] - with self.assertRaises(ValueError): - self._read_row_helper(chunks, None, app_profile_id) - - def test_read_row_still_partial(self): - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - ) - # No "commit row". 
- chunks = [chunk] - with self.assertRaises(ValueError): - self._read_row_helper(chunks, None) - - def _mutate_rows_helper( - self, mutation_timeout=None, app_profile_id=None, retry=None, timeout=None - ): - from google.rpc.status_pb2 import Status - from google.cloud.bigtable.table import DEFAULT_RETRY - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - client._table_admin_client = table_api - ctor_kwargs = {} +def _table_read_row_helper(chunks, expected_result, app_profile_id=None): + from google.cloud._testing import _Monkey + from google.cloud.bigtable import table as MUT + from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_filters import RowSampleFilter + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance, app_profile_id=app_profile_id) + + # Create request_pb + request_pb = object() # Returned by our mock. + mock_created = [] + + def mock_create_row_request(table_name, **kwargs): + mock_created.append((table_name, kwargs)) + return request_pb - if mutation_timeout is not None: - ctor_kwargs["mutation_timeout"] = mutation_timeout + # Create response_iterator + if chunks is None: + response_iterator = iter(()) # no responses at all + else: + response_pb = _ReadRowsResponsePB(chunks=chunks) + response_iterator = iter([response_pb]) - if app_profile_id is not None: - ctor_kwargs["app_profile_id"] = app_profile_id + data_api = client._table_data_client = _make_data_api() + data_api.read_rows.return_value = response_iterator - table = self._make_one(self.TABLE_ID, instance, **ctor_kwargs) + filter_obj = RowSampleFilter(0.33) - rows = [mock.MagicMock(), mock.MagicMock()] - response = [Status(code=0), Status(code=1)] - instance_mock = mock.Mock(return_value=response) - klass_mock = mock.patch( - "google.cloud.bigtable.table._RetryableMutateRowsWorker", - new=mock.MagicMock(return_value=instance_mock), + with _Monkey(MUT, _create_row_request=mock_create_row_request): + result = table.read_row(ROW_KEY, filter_=filter_obj) + + row_set = RowSet() + row_set.add_row_key(ROW_KEY) + expected_request = [ + ( + table.name, + { + "end_inclusive": False, + "row_set": row_set, + "app_profile_id": app_profile_id, + "end_key": None, + "limit": None, + "start_key": None, + "filter_": filter_obj, + }, ) + ] + assert result == expected_result + assert mock_created == expected_request - call_kwargs = {} + data_api.read_rows.assert_called_once_with(request_pb, timeout=61.0) - if retry is not None: - call_kwargs["retry"] = retry - if timeout is not None: - expected_timeout = call_kwargs["timeout"] = timeout - else: - expected_timeout = mutation_timeout +def test_table_read_row_miss_no__responses(): + _table_read_row_helper(None, None) - with klass_mock: - statuses = table.mutate_rows(rows, **call_kwargs) - result = [status.code for status in statuses] - expected_result = [0, 1] - self.assertEqual(result, expected_result) - - klass_mock.new.assert_called_once_with( - client, - self.TABLE_NAME, - rows, - app_profile_id=app_profile_id, - timeout=expected_timeout, - ) +def 
test_table_read_row_miss_no_chunks_in_response(): + chunks = [] + _table_read_row_helper(chunks, None) - if retry is not None: - instance_mock.assert_called_once_with(retry=retry) - else: - instance_mock.assert_called_once_with(retry=DEFAULT_RETRY) - - def test_mutate_rows_w_default_mutation_timeout_app_profile_id(self): - self._mutate_rows_helper() - - def test_mutate_rows_w_mutation_timeout(self): - mutation_timeout = 123 - self._mutate_rows_helper(mutation_timeout=mutation_timeout) - - def test_mutate_rows_w_app_profile_id(self): - app_profile_id = "profile-123" - self._mutate_rows_helper(app_profile_id=app_profile_id) - - def test_mutate_rows_w_retry(self): - retry = mock.Mock() - self._mutate_rows_helper(retry=retry) - - def test_mutate_rows_w_timeout_arg(self): - timeout = 123 - self._mutate_rows_helper(timeout=timeout) - - def test_mutate_rows_w_mutation_timeout_and_timeout_arg(self): - mutation_timeout = 123 - timeout = 456 - self._mutate_rows_helper(mutation_timeout=mutation_timeout, timeout=timeout) - - def test_read_rows(self): - from google.cloud._testing import _Monkey - from google.cloud.bigtable.row_data import PartialRowsData - from google.cloud.bigtable import table as MUT - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - app_profile_id = "app-profile-id" - table = self._make_one(self.TABLE_ID, instance, app_profile_id=app_profile_id) - - # Create request_pb - request = object() # Returned by our mock. - retry = DEFAULT_RETRY_READ_ROWS - mock_created = [] - - def mock_create_row_request(table_name, **kwargs): - mock_created.append((table_name, kwargs)) - return request - - # Create expected_result. - expected_result = PartialRowsData( - client._table_data_client.transport.read_rows, request, retry - ) +def test_table_read_row_complete(): + from google.cloud.bigtable.row_data import Cell + from google.cloud.bigtable.row_data import PartialRowData - # Perform the method and check the result. 
- start_key = b"start-key" - end_key = b"end-key" - filter_obj = object() - limit = 22 - with _Monkey(MUT, _create_row_request=mock_create_row_request): - result = table.read_rows( - start_key=start_key, - end_key=end_key, - filter_=filter_obj, - limit=limit, - retry=retry, - ) + app_profile_id = "app-profile-id" + chunk = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) + chunks = [chunk] + expected_result = PartialRowData(row_key=ROW_KEY) + family = expected_result._cells.setdefault(FAMILY_NAME, {}) + column = family.setdefault(QUALIFIER, []) + column.append(Cell.from_pb(chunk)) + + _table_read_row_helper(chunks, expected_result, app_profile_id) + + +def test_table_read_row_more_than_one_row_returned(): + app_profile_id = "app-profile-id" + chunk_1 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + )._pb + chunk_2 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY_2, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + )._pb + + chunks = [chunk_1, chunk_2] + + with pytest.raises(ValueError): + _table_read_row_helper(chunks, None, app_profile_id) + + +def test_table_read_row_still_partial(): + chunk = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + ) + chunks = [chunk] # No "commit row". - self.assertEqual(result.rows, expected_result.rows) - self.assertEqual(result.retry, expected_result.retry) - created_kwargs = { - "start_key": start_key, - "end_key": end_key, - "filter_": filter_obj, - "limit": limit, - "end_inclusive": False, - "app_profile_id": app_profile_id, - "row_set": None, - } - self.assertEqual(mock_created, [(table.name, created_kwargs)]) + with pytest.raises(ValueError): + _table_read_row_helper(chunks, None) - def test_read_retry_rows(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.api_core import retry - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - retry_read_rows = retry.Retry(predicate=_read_rows_retry_exception) - - # Create response_iterator - chunk_1 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_1, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) +def _table_mutate_rows_helper( + mutation_timeout=None, app_profile_id=None, retry=None, timeout=None +): + from google.rpc.status_pb2 import Status + from google.cloud.bigtable.table import DEFAULT_RETRY - chunk_2 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_2, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) + credentials = _make_credentials() 
+ client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + ctor_kwargs = {} - response_1 = _ReadRowsResponseV2([chunk_1]) - response_2 = _ReadRowsResponseV2([chunk_2]) - response_failure_iterator_1 = _MockFailureIterator_1() - response_failure_iterator_2 = _MockFailureIterator_2([response_1]) - response_iterator = _MockReadRowsIterator(response_2) - - # Patch the stub used by the API method. - data_api.table_path.return_value = f"projects/{self.PROJECT_ID}/instances/{self.INSTANCE_ID}/tables/{self.TABLE_ID}" - - client._table_data_client.read_rows = mock.Mock( - side_effect=[ - response_failure_iterator_1, - response_failure_iterator_2, - response_iterator, - ] - ) + if mutation_timeout is not None: + ctor_kwargs["mutation_timeout"] = mutation_timeout - table._instance._client._table_data_client = data_api - table._instance._client._table_admin_client = table_api - rows = [] - for row in table.read_rows( - start_key=self.ROW_KEY_1, end_key=self.ROW_KEY_2, retry=retry_read_rows - ): - rows.append(row) + if app_profile_id is not None: + ctor_kwargs["app_profile_id"] = app_profile_id - result = rows[1] - self.assertEqual(result.row_key, self.ROW_KEY_2) + table = _make_table(TABLE_ID, instance, **ctor_kwargs) - def test_yield_retry_rows(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) + rows = [mock.MagicMock(), mock.MagicMock()] + response = [Status(code=0), Status(code=1)] + instance_mock = mock.Mock(return_value=response) + klass_mock = mock.patch( + "google.cloud.bigtable.table._RetryableMutateRowsWorker", + new=mock.MagicMock(return_value=instance_mock), + ) - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - # Create response_iterator - chunk_1 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_1, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) + call_kwargs = {} - chunk_2 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_2, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) + if retry is not None: + call_kwargs["retry"] = retry - response_1 = _ReadRowsResponseV2([chunk_1]) - response_2 = _ReadRowsResponseV2([chunk_2]) - response_failure_iterator_1 = _MockFailureIterator_1() - response_failure_iterator_2 = _MockFailureIterator_2([response_1]) - response_iterator = _MockReadRowsIterator(response_2) - - # Patch the stub used by the API method. 
- data_api.table_path.return_value = f"projects/{self.PROJECT_ID}/instances/{self.INSTANCE_ID}/tables/{self.TABLE_ID}" - table_api.table_path.return_value = f"projects/{self.PROJECT_ID}/instances/{self.INSTANCE_ID}/tables/{self.TABLE_ID}" - - table._instance._client._table_data_client = data_api - table._instance._client._table_admin_client = table_api - client._table_data_client.read_rows.side_effect = [ - response_failure_iterator_1, - response_failure_iterator_2, - response_iterator, - ] + if timeout is not None: + expected_timeout = call_kwargs["timeout"] = timeout + else: + expected_timeout = mutation_timeout - rows = [] - with warnings.catch_warnings(record=True) as warned: - for row in table.yield_rows( - start_key=self.ROW_KEY_1, end_key=self.ROW_KEY_2 - ): - rows.append(row) + with klass_mock: + statuses = table.mutate_rows(rows, **call_kwargs) - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, DeprecationWarning) + result = [status.code for status in statuses] + expected_result = [0, 1] + assert result == expected_result - result = rows[1] - self.assertEqual(result.row_key, self.ROW_KEY_2) + klass_mock.new.assert_called_once_with( + client, + TABLE_NAME, + rows, + app_profile_id=app_profile_id, + timeout=expected_timeout, + ) - def test_yield_rows_with_row_set(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.cloud.bigtable.row_set import RowSet - from google.cloud.bigtable.row_set import RowRange - - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - # Create response_iterator - chunk_1 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_1, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) + if retry is not None: + instance_mock.assert_called_once_with(retry=retry) + else: + instance_mock.assert_called_once_with(retry=DEFAULT_RETRY) - chunk_2 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_2, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - chunk_3 = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY_3, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) +def test_table_mutate_rows_w_default_mutation_timeout_app_profile_id(): + _table_mutate_rows_helper() - response_1 = _ReadRowsResponseV2([chunk_1]) - response_2 = _ReadRowsResponseV2([chunk_2]) - response_3 = _ReadRowsResponseV2([chunk_3]) - response_iterator = _MockReadRowsIterator(response_1, response_2, response_3) - # Patch the stub used by the API method. 
- data_api.table_path.return_value = f"projects/{self.PROJECT_ID}/instances/{self.INSTANCE_ID}/tables/{self.TABLE_ID}" - table_api.table_path.return_value = f"projects/{self.PROJECT_ID}/instances/{self.INSTANCE_ID}/tables/{self.TABLE_ID}" +def test_table_mutate_rows_w_mutation_timeout(): + mutation_timeout = 123 + _table_mutate_rows_helper(mutation_timeout=mutation_timeout) - table._instance._client._table_data_client = data_api - table._instance._client._table_admin_client = table_api - client._table_data_client.read_rows.side_effect = [response_iterator] - rows = [] - row_set = RowSet() - row_set.add_row_range( - RowRange(start_key=self.ROW_KEY_1, end_key=self.ROW_KEY_2) - ) - row_set.add_row_key(self.ROW_KEY_3) +def test_table_mutate_rows_w_app_profile_id(): + app_profile_id = "profile-123" + _table_mutate_rows_helper(app_profile_id=app_profile_id) - with warnings.catch_warnings(record=True) as warned: - for row in table.yield_rows(row_set=row_set): - rows.append(row) - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, DeprecationWarning) +def test_table_mutate_rows_w_retry(): + retry = mock.Mock() + _table_mutate_rows_helper(retry=retry) - self.assertEqual(rows[0].row_key, self.ROW_KEY_1) - self.assertEqual(rows[1].row_key, self.ROW_KEY_2) - self.assertEqual(rows[2].row_key, self.ROW_KEY_3) - def test_sample_row_keys(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) +def test_table_mutate_rows_w_timeout_arg(): + timeout = 123 + _table_mutate_rows_helper(timeout=timeout) - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - # Create response_iterator - response_iterator = object() # Just passed to a mock. +def test_table_mutate_rows_w_mutation_timeout_and_timeout_arg(): + mutation_timeout = 123 + timeout = 456 + _table_mutate_rows_helper(mutation_timeout=mutation_timeout, timeout=timeout) - # Patch the stub used by the API method. - client._table_data_client.sample_row_keys.side_effect = [[response_iterator]] - # Create expected_result. - expected_result = response_iterator +def test_table_read_rows(): + from google.cloud._testing import _Monkey + from google.cloud.bigtable.row_data import PartialRowsData + from google.cloud.bigtable import table as MUT + from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS - # Perform the method and check the result. 
- result = table.sample_row_keys() - self.assertEqual(result[0], expected_result) + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + data_api = client._table_data_client = _make_data_api() + instance = client.instance(instance_id=INSTANCE_ID) + app_profile_id = "app-profile-id" + table = _make_table(TABLE_ID, instance, app_profile_id=app_profile_id) - def test_truncate(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) + # Create request_pb + request_pb = object() # Returned by our mock. + retry = DEFAULT_RETRY_READ_ROWS + mock_created = [] - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + def mock_create_row_request(table_name, **kwargs): + mock_created.append((table_name, kwargs)) + return request_pb - expected_result = None # truncate() has no return value. - with mock.patch("google.cloud.bigtable.table.Table.name", new=self.TABLE_NAME): - result = table.truncate() + # Create expected_result. + expected_result = PartialRowsData( + client._table_data_client.transport.read_rows, request_pb, retry + ) - table_api.drop_row_range.assert_called_once_with( - request={"name": self.TABLE_NAME, "delete_all_data_from_table": True} - ) + # Perform the method and check the result. 
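+    # Perform the method and check the result.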
+ start_key = b"start-key" + end_key = b"end-key" + filter_obj = object() + limit = 22 + with _Monkey(MUT, _create_row_request=mock_create_row_request): + result = table.read_rows( + start_key=start_key, + end_key=end_key, + filter_=filter_obj, + limit=limit, + retry=retry, + ) + + assert result.rows == expected_result.rows + assert result.retry == expected_result.retry + created_kwargs = { + "start_key": start_key, + "end_key": end_key, + "filter_": filter_obj, + "limit": limit, + "end_inclusive": False, + "app_profile_id": app_profile_id, + "row_set": None, + } + assert mock_created == [(table.name, created_kwargs)] + + data_api.read_rows.assert_called_once_with(request_pb, timeout=61.0) + + +def test_table_read_retry_rows(): + from google.api_core import retry + from google.cloud.bigtable.table import _create_row_request + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + data_api = client._table_data_client = _make_data_api() + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + retry_read_rows = retry.Retry(predicate=_read_rows_retry_exception) + + # Create response_iterator + chunk_1 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY_1, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) - self.assertEqual(result, expected_result) + chunk_2 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY_2, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) - def test_truncate_w_timeout(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) + response_1 = _ReadRowsResponseV2([chunk_1]) + response_2 = _ReadRowsResponseV2([chunk_2]) + response_failure_iterator_1 = _MockFailureIterator_1() + response_failure_iterator_2 = _MockFailureIterator_2([response_1]) + response_iterator = _MockReadRowsIterator(response_2) - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True + data_api.table_path.return_value = ( + f"projects/{PROJECT_ID}/instances/{INSTANCE_ID}/tables/{TABLE_ID}" + ) + + data_api.read_rows.side_effect = [ + response_failure_iterator_1, + response_failure_iterator_2, + response_iterator, + ] + + rows = [ + row + for row in table.read_rows( + start_key=ROW_KEY_1, end_key=ROW_KEY_2, retry=retry_read_rows ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + ] - expected_result = None # truncate() has no return value. 
+ result = rows[1]
+ assert result.row_key == ROW_KEY_2

- timeout = 120
- result = table.truncate(timeout=timeout)
+ expected_request = _create_row_request(
+ table.name, start_key=ROW_KEY_1, end_key=ROW_KEY_2,
+ )
+ # Retries resume after the last row seen, so only the first call
+ # matches the original request exactly.
+ assert data_api.read_rows.call_count == 3
+ assert data_api.read_rows.call_args_list[0][0][0] == expected_request

- self.assertEqual(result, expected_result)

- def test_drop_by_prefix(self):
- from google.cloud.bigtable_v2.services.bigtable import BigtableClient
- from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
- client as bigtable_table_admin,
- )

+def test_table_yield_retry_rows():
+ from google.cloud.bigtable.table import _create_row_request

- data_api = mock.create_autospec(BigtableClient)
- table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient)
- credentials = _make_credentials()
- client = self._make_client(
- project="project-id", credentials=credentials, admin=True
- )
- client._table_data_client = data_api
- client._table_admin_client = table_api
- instance = client.instance(instance_id=self.INSTANCE_ID)
- table = self._make_one(self.TABLE_ID, instance)
+ credentials = _make_credentials()
+ client = _make_client(project="project-id", credentials=credentials, admin=True)
+ instance = client.instance(instance_id=INSTANCE_ID)
+ table = _make_table(TABLE_ID, instance)
+
+ # Create response_iterator
+ chunk_1 = _ReadRowsResponseCellChunkPB(
+ row_key=ROW_KEY_1,
+ family_name=FAMILY_NAME,
+ qualifier=QUALIFIER,
+ timestamp_micros=TIMESTAMP_MICROS,
+ value=VALUE,
+ commit_row=True,
+ )

+ chunk_2 = _ReadRowsResponseCellChunkPB(
+ row_key=ROW_KEY_2,
+ family_name=FAMILY_NAME,
+ qualifier=QUALIFIER,
+ timestamp_micros=TIMESTAMP_MICROS,
+ value=VALUE,
+ commit_row=True,
+ )

- expected_result = None # drop_by_prefix() has no return value.
+ response_1 = _ReadRowsResponseV2([chunk_1])
+ response_2 = _ReadRowsResponseV2([chunk_2])
+ response_failure_iterator_1 = _MockFailureIterator_1()
+ response_failure_iterator_2 = _MockFailureIterator_2([response_1])
+ response_iterator = _MockReadRowsIterator(response_2)

- row_key_prefix = "row-key-prefix"
+ data_api = client._table_data_client = _make_data_api()
+ data_api.table_path.return_value = (
+ f"projects/{PROJECT_ID}/instances/{INSTANCE_ID}/tables/{TABLE_ID}"
+ )
+ data_api.read_rows.side_effect = [
+ response_failure_iterator_1,
+ response_failure_iterator_2,
+ response_iterator,
+ ]
+
+ rows = []
+ with warnings.catch_warnings(record=True) as warned:
+ for row in table.yield_rows(start_key=ROW_KEY_1, end_key=ROW_KEY_2):
+ rows.append(row)

- result = table.drop_by_prefix(row_key_prefix=row_key_prefix)
+ assert len(warned) == 1
+ assert warned[0].category is DeprecationWarning

- self.assertEqual(result, expected_result)

- def test_drop_by_prefix_w_timeout(self):
- from google.cloud.bigtable_v2.services.bigtable import BigtableClient
- from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import (
- client as bigtable_table_admin,
- )
+ result = rows[1]
+ assert result.row_key == ROW_KEY_2

- data_api = mock.create_autospec(BigtableClient)
- table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient)
- credentials = _make_credentials()
- client = self._make_client(
- project="project-id", credentials=credentials, admin=True
- )
- client._table_data_client = data_api
- client._table_admin_client = table_api
- instance = client.instance(instance_id=self.INSTANCE_ID)
- table = self._make_one(self.TABLE_ID, instance)
+ expected_request = _create_row_request(
+ table.name, start_key=ROW_KEY_1, end_key=ROW_KEY_2,
+ )
+ # As above: three attempts, with only the first carrying the
+ # original request.
+ assert data_api.read_rows.call_count == 3
+ assert data_api.read_rows.call_args_list[0][0][0] == expected_request
+
+
+def test_table_yield_rows_with_row_set():
+ from google.cloud.bigtable.row_set import RowSet
+ from google.cloud.bigtable.row_set import RowRange
+ from google.cloud.bigtable.table import _create_row_request
+
+ credentials = _make_credentials()
+ client = _make_client(project="project-id", credentials=credentials, admin=True)
+ instance = client.instance(instance_id=INSTANCE_ID)
+ 
table = _make_table(TABLE_ID, instance) + + # Create response_iterator + chunk_1 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY_1, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + chunk_2 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY_2, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) - expected_result = None # drop_by_prefix() has no return value. + chunk_3 = _ReadRowsResponseCellChunkPB( + row_key=ROW_KEY_3, + family_name=FAMILY_NAME, + qualifier=QUALIFIER, + timestamp_micros=TIMESTAMP_MICROS, + value=VALUE, + commit_row=True, + ) - row_key_prefix = "row-key-prefix" + response_1 = _ReadRowsResponseV2([chunk_1]) + response_2 = _ReadRowsResponseV2([chunk_2]) + response_3 = _ReadRowsResponseV2([chunk_3]) + response_iterator = _MockReadRowsIterator(response_1, response_2, response_3) - timeout = 120 - result = table.drop_by_prefix(row_key_prefix=row_key_prefix, timeout=timeout) + data_api = client._table_data_client = _make_data_api() + data_api.table_path.return_value = ( + f"projects/{PROJECT_ID}/instances/{INSTANCE_ID}/tables/{TABLE_ID}" + ) + data_api.read_rows.side_effect = [response_iterator] - self.assertEqual(result, expected_result) + rows = [] + row_set = RowSet() + row_set.add_row_range(RowRange(start_key=ROW_KEY_1, end_key=ROW_KEY_2)) + row_set.add_row_key(ROW_KEY_3) - def test_mutations_batcher_factory(self): - flush_count = 100 - max_row_bytes = 1000 - table = self._make_one(self.TABLE_ID, None) - mutation_batcher = table.mutations_batcher( - flush_count=flush_count, max_row_bytes=max_row_bytes - ) + with warnings.catch_warnings(record=True) as warned: + for row in table.yield_rows(row_set=row_set): + rows.append(row) - self.assertEqual(mutation_batcher.table.table_id, self.TABLE_ID) - self.assertEqual(mutation_batcher.flush_count, flush_count) - self.assertEqual(mutation_batcher.max_row_bytes, max_row_bytes) + assert len(warned) == 1 + assert warned[0].category is DeprecationWarning - def test_get_iam_policy(self): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + assert rows[0].row_key == ROW_KEY_1 + assert rows[1].row_key == ROW_KEY_2 + assert rows[2].row_key == ROW_KEY_3 - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + expected_request = _create_row_request( + table.name, start_key=ROW_KEY_1, end_key=ROW_KEY_2, + ) + expected_request.rows.row_keys.append(ROW_KEY_3) + data_api.read_rows.assert_called_once_with(expected_request, timeout=61.0) - version = 1 - etag = b"etag_v1" - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}] - 
iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - client._table_admin_client = table_api - table_api.get_iam_policy.return_value = iam_policy +def test_table_sample_row_keys(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + response_iterator = object() - result = table.get_iam_policy() + data_api = client._table_data_client = _make_data_api() + data_api.sample_row_keys.return_value = [response_iterator] - table_api.get_iam_policy.assert_called_once_with( - request={"resource": table.name} - ) - self.assertEqual(result.version, version) - self.assertEqual(result.etag, etag) - admins = result.bigtable_admins - self.assertEqual(len(admins), len(members)) - for found, expected in zip(sorted(admins), sorted(members)): - self.assertEqual(found, expected) - - def test_set_iam_policy(self): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import Policy - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + result = table.sample_row_keys() - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - version = 1 - etag = b"etag_v1" - members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] - bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": sorted(members)}] - iam_policy_pb = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) - - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - client._table_admin_client = table_api - table_api.set_iam_policy.return_value = iam_policy_pb - - iam_policy = Policy(etag=etag, version=version) - iam_policy[BIGTABLE_ADMIN_ROLE] = [ - Policy.user("user1@test.com"), - Policy.service_account("service_acc1@test.com"), - ] + assert result[0] == response_iterator - result = table.set_iam_policy(iam_policy) - table_api.set_iam_policy.assert_called_once_with( - request={"resource": table.name, "policy": iam_policy_pb} - ) - self.assertEqual(result.version, version) - self.assertEqual(result.etag, etag) - admins = result.bigtable_admins - self.assertEqual(len(admins), len(members)) - for found, expected in zip(sorted(admins), sorted(members)): - self.assertEqual(found, expected) - - def test_test_iam_permissions(self): - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) - from google.iam.v1 import iam_policy_pb2 +def test_table_truncate(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + table_api = client._table_admin_client = _make_table_api() - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + with mock.patch("google.cloud.bigtable.table.Table.name", new=TABLE_NAME): + result = table.truncate() + 
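+ # Patching ``Table.name`` above sidesteps the (unmocked) data client
+ # that the property would otherwise consult to build the resource
+ # path; ``truncate()`` itself returns None.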
+ assert result is None - permissions = ["bigtable.tables.mutateRows", "bigtable.tables.readRows"] + table_api.drop_row_range.assert_called_once_with( + request={"name": TABLE_NAME, "delete_all_data_from_table": True} + ) - response = iam_policy_pb2.TestIamPermissionsResponse(permissions=permissions) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - table_api.test_iam_permissions.return_value = response - client._table_admin_client = table_api +def test_table_truncate_w_timeout(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + table_api = client._table_admin_client = _make_table_api() - result = table.test_iam_permissions(permissions) + timeout = 120 + result = table.truncate(timeout=timeout) - self.assertEqual(result, permissions) - table_api.test_iam_permissions.assert_called_once_with( - request={"resource": table.name, "permissions": permissions} - ) + assert result is None - def test_backup_factory_defaults(self): - from google.cloud.bigtable.backup import Backup - - instance = self._make_one(self.INSTANCE_ID, None) - table = self._make_one(self.TABLE_ID, instance) - backup = table.backup(self.BACKUP_ID) - - self.assertIsInstance(backup, Backup) - self.assertEqual(backup.backup_id, self.BACKUP_ID) - self.assertIs(backup._instance, instance) - self.assertIsNone(backup._cluster) - self.assertEqual(backup.table_id, self.TABLE_ID) - self.assertIsNone(backup._expire_time) - - self.assertIsNone(backup._parent) - self.assertIsNone(backup._source_table) - self.assertIsNone(backup._start_time) - self.assertIsNone(backup._end_time) - self.assertIsNone(backup._size_bytes) - self.assertIsNone(backup._state) - - def test_backup_factory_non_defaults(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud.bigtable.backup import Backup - - instance = self._make_one(self.INSTANCE_ID, None) - table = self._make_one(self.TABLE_ID, instance) - timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) - backup = table.backup( - self.BACKUP_ID, cluster_id=self.CLUSTER_ID, expire_time=timestamp, - ) + table_api.drop_row_range.assert_called_once_with( + request={"name": TABLE_NAME, "delete_all_data_from_table": True}, timeout=120, + ) - self.assertIsInstance(backup, Backup) - self.assertEqual(backup.backup_id, self.BACKUP_ID) - self.assertIs(backup._instance, instance) - - self.assertEqual(backup.backup_id, self.BACKUP_ID) - self.assertIs(backup._cluster, self.CLUSTER_ID) - self.assertEqual(backup.table_id, self.TABLE_ID) - self.assertEqual(backup._expire_time, timestamp) - self.assertIsNone(backup._start_time) - self.assertIsNone(backup._end_time) - self.assertIsNone(backup._size_bytes) - self.assertIsNone(backup._state) - - def _list_backups_helper(self, cluster_id=None, filter_=None, **kwargs): - from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, - ) - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BigtableTableAdminClient, - ) - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin, - Backup as backup_pb, - ) - from google.cloud.bigtable.backup import Backup - instance_api = mock.create_autospec(BigtableInstanceAdminClient) - table_api = mock.create_autospec(BigtableTableAdminClient) - client = self._make_client( - project=self.PROJECT_ID, 
credentials=_make_credentials(), admin=True - ) - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) - - client._instance_admin_client = instance_api - client._table_admin_client = table_api - table._instance._client._instance_admin_client = instance_api - table._instance._client._table_admin_client = table_api - - parent = self.INSTANCE_NAME + "/clusters/cluster" - backups_pb = bigtable_table_admin.ListBackupsResponse( - backups=[ - backup_pb(name=parent + "/backups/op1"), - backup_pb(name=parent + "/backups/op2"), - backup_pb(name=parent + "/backups/op3"), - ] - ) +def test_table_drop_by_prefix(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + table_api = client._table_admin_client = _make_table_api() - table_api.list_backups.return_value = backups_pb - api = table._instance._client._table_admin_client.list_backups + row_key_prefix = b"row-key-prefix" - backups_filter = "source_table:{}".format(self.TABLE_NAME) - if filter_: - backups_filter = "({}) AND ({})".format(backups_filter, filter_) + result = table.drop_by_prefix(row_key_prefix=row_key_prefix) - backups = table.list_backups(cluster_id=cluster_id, filter_=filter_, **kwargs) + assert result is None - for backup in backups: - self.assertIsInstance(backup, Backup) + table_api.drop_row_range.assert_called_once_with( + request={"name": TABLE_NAME, "row_key_prefix": row_key_prefix}, + ) - if not cluster_id: - cluster_id = "-" - parent = "{}/clusters/{}".format(self.INSTANCE_NAME, cluster_id) - order_by = None - page_size = 0 - if "order_by" in kwargs: - order_by = kwargs["order_by"] +def test_table_drop_by_prefix_w_timeout(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + table_api = client._table_admin_client = _make_table_api() - if "page_size" in kwargs: - page_size = kwargs["page_size"] + row_key_prefix = b"row-key-prefix" - api.assert_called_once_with( - request={ - "parent": parent, - "filter": backups_filter, - "order_by": order_by, - "page_size": page_size, - } - ) + timeout = 120 + result = table.drop_by_prefix(row_key_prefix=row_key_prefix, timeout=timeout) - def test_list_backups_defaults(self): - self._list_backups_helper() + assert result is None - def test_list_backups_w_options(self): - self._list_backups_helper( - cluster_id="cluster", filter_="filter", order_by="order_by", page_size=10 - ) + table_api.drop_row_range.assert_called_once_with( + request={"name": TABLE_NAME, "row_key_prefix": row_key_prefix}, timeout=120, + ) - def _restore_helper(self, backup_name=None): - from google.cloud.bigtable_admin_v2 import BigtableTableAdminClient - from google.cloud.bigtable.instance import Instance - op_future = object() - credentials = _make_credentials() - client = self._make_client( - project=self.PROJECT_ID, credentials=credentials, admin=True - ) +def test_table_mutations_batcher_factory(): + flush_count = 100 + max_row_bytes = 1000 + table = _make_table(TABLE_ID, None) + mutation_batcher = table.mutations_batcher( + flush_count=flush_count, max_row_bytes=max_row_bytes + ) - instance = Instance(self.INSTANCE_ID, client=client) - table = self._make_one(self.TABLE_ID, instance) + assert mutation_batcher.table.table_id == TABLE_ID + 
assert mutation_batcher.flush_count == flush_count + assert mutation_batcher.max_row_bytes == max_row_bytes - api = client._table_admin_client = mock.create_autospec( - BigtableTableAdminClient - ) - api.restore_table.return_value = op_future - table._instance._client._table_admin_client = api +def test_table_get_iam_policy(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - if backup_name: - future = table.restore(self.TABLE_ID, backup_name=self.BACKUP_NAME) - else: - future = table.restore(self.TABLE_ID, self.CLUSTER_ID, self.BACKUP_ID) - self.assertIs(future, op_future) - - api.restore_table.assert_called_once_with( - request={ - "parent": self.INSTANCE_NAME, - "table_id": self.TABLE_ID, - "backup": self.BACKUP_NAME, - } - ) + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + version = 1 + etag = b"etag_v1" + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": members}] + iam_policy = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) + + table_api = client._table_admin_client = _make_table_api() + table_api.get_iam_policy.return_value = iam_policy - def test_restore_table_w_backup_id(self): - self._restore_helper() + result = table.get_iam_policy() - def test_restore_table_w_backup_name(self): - self._restore_helper(backup_name=self.BACKUP_NAME) + assert result.version == version + assert result.etag == etag + admins = result.bigtable_admins + assert len(admins) == len(members) + for found, expected in zip(sorted(admins), sorted(members)): + assert found == expected -class Test__RetryableMutateRowsWorker(unittest.TestCase): - from grpc import StatusCode + table_api.get_iam_policy.assert_called_once_with(request={"resource": table.name}) - PROJECT_ID = "project-id" - INSTANCE_ID = "instance-id" - INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID - TABLE_ID = "table-id" - # RPC Status Codes - SUCCESS = StatusCode.OK.value[0] - RETRYABLE_1 = StatusCode.DEADLINE_EXCEEDED.value[0] - RETRYABLE_2 = StatusCode.ABORTED.value[0] - RETRYABLE_3 = StatusCode.UNAVAILABLE.value[0] - RETRYABLES = (RETRYABLE_1, RETRYABLE_2, RETRYABLE_3) - NON_RETRYABLE = StatusCode.CANCELLED.value[0] +def test_table_set_iam_policy(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import Policy + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - @staticmethod - def _get_target_class_for_worker(): - from google.cloud.bigtable.table import _RetryableMutateRowsWorker + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) - return _RetryableMutateRowsWorker + version = 1 + etag = b"etag_v1" + members = ["serviceAccount:service_acc1@test.com", "user:user1@test.com"] + bindings = [{"role": BIGTABLE_ADMIN_ROLE, "members": sorted(members)}] + iam_policy_pb = policy_pb2.Policy(version=version, etag=etag, bindings=bindings) - def _make_worker(self, *args, **kwargs): - return self._get_target_class_for_worker()(*args, **kwargs) + table_api = client._table_admin_client = _make_table_api() + table_api.set_iam_policy.return_value = iam_policy_pb - @staticmethod - def _get_target_class_for_table(): - from 
google.cloud.bigtable.table import Table + iam_policy = Policy(etag=etag, version=version) + iam_policy[BIGTABLE_ADMIN_ROLE] = [ + Policy.user("user1@test.com"), + Policy.service_account("service_acc1@test.com"), + ] - return Table + result = table.set_iam_policy(iam_policy) - def _make_table(self, *args, **kwargs): - return self._get_target_class_for_table()(*args, **kwargs) + assert result.version == version + assert result.etag == etag + admins = result.bigtable_admins + assert len(admins) == len(members) - @staticmethod - def _get_target_client_class(): - from google.cloud.bigtable.client import Client + for found, expected in zip(sorted(admins), sorted(members)): + assert found == expected + + table_api.set_iam_policy.assert_called_once_with( + request={"resource": table.name, "policy": iam_policy_pb} + ) - return Client - def _make_client(self, *args, **kwargs): - return self._get_target_client_class()(*args, **kwargs) +def test_table_test_iam_permissions(): + from google.iam.v1 import iam_policy_pb2 - def _make_responses_statuses(self, codes): - from google.rpc.status_pb2 import Status + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) - response = [Status(code=code) for code in codes] - return response + permissions = ["bigtable.tables.mutateRows", "bigtable.tables.readRows"] - def _make_responses(self, codes): - from google.cloud.bigtable_v2.types.bigtable import MutateRowsResponse - from google.rpc.status_pb2 import Status + response = iam_policy_pb2.TestIamPermissionsResponse(permissions=permissions) - entries = [ - MutateRowsResponse.Entry(index=i, status=Status(code=codes[i])) - for i in range(len(codes)) + table_api = client._table_admin_client = _make_table_api() + table_api.test_iam_permissions.return_value = response + + result = table.test_iam_permissions(permissions) + + assert result == permissions + + table_api.test_iam_permissions.assert_called_once_with( + request={"resource": table.name, "permissions": permissions} + ) + + +def test_table_backup_factory_defaults(): + from google.cloud.bigtable.backup import Backup + + instance = _make_table(INSTANCE_ID, None) + table = _make_table(TABLE_ID, instance) + backup = table.backup(BACKUP_ID) + + assert isinstance(backup, Backup) + assert backup.backup_id == BACKUP_ID + assert backup._instance is instance + assert backup._cluster is None + assert backup.table_id == TABLE_ID + assert backup._expire_time is None + + assert backup._parent is None + assert backup._source_table is None + assert backup._start_time is None + assert backup._end_time is None + assert backup._size_bytes is None + assert backup._state is None + + +def test_table_backup_factory_non_defaults(): + import datetime + from google.cloud._helpers import UTC + from google.cloud.bigtable.backup import Backup + from google.cloud.bigtable.instance import Instance + + instance = Instance(INSTANCE_ID, None) + table = _make_table(TABLE_ID, instance) + timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC) + backup = table.backup(BACKUP_ID, cluster_id=CLUSTER_ID, expire_time=timestamp,) + + assert isinstance(backup, Backup) + assert backup.backup_id == BACKUP_ID + assert backup._instance is instance + + assert backup.backup_id == BACKUP_ID + assert backup._cluster is CLUSTER_ID + assert backup.table_id == TABLE_ID + assert backup._expire_time == timestamp + assert backup._start_time is None + assert 
backup._end_time is None + assert backup._size_bytes is None + assert backup._state is None + + +def _table_list_backups_helper(cluster_id=None, filter_=None, **kwargs): + from google.cloud.bigtable_admin_v2.types import ( + Backup as backup_pb, + bigtable_table_admin, + ) + from google.cloud.bigtable.backup import Backup + + client = _make_client( + project=PROJECT_ID, credentials=_make_credentials(), admin=True + ) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + parent = INSTANCE_NAME + "/clusters/cluster" + backups_pb = bigtable_table_admin.ListBackupsResponse( + backups=[ + backup_pb(name=parent + "/backups/op1"), + backup_pb(name=parent + "/backups/op2"), + backup_pb(name=parent + "/backups/op3"), ] - return MutateRowsResponse(entries=entries) + ) - def test_callable_empty_rows(self): - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) + table_api = client._table_admin_client = _make_table_api() + table_api.list_backups.return_value = backups_pb - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_table(self.TABLE_ID, instance) + backups_filter = "source_table:{}".format(TABLE_NAME) + if filter_: + backups_filter = "({}) AND ({})".format(backups_filter, filter_) - worker = self._make_worker(client, table.name, []) - statuses = worker() + backups = table.list_backups(cluster_id=cluster_id, filter_=filter_, **kwargs) - self.assertEqual(len(statuses), 0) + for backup in backups: + assert isinstance(backup, Backup) - def test_callable_no_retry_strategy(self): - from google.cloud.bigtable.row import DirectRow - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) + if not cluster_id: + cluster_id = "-" + parent = "{}/clusters/{}".format(INSTANCE_NAME, cluster_id) - # Setup: - # - Mutate 3 rows. - # Action: - # - Attempt to mutate the rows w/o any retry strategy. - # Expectation: - # - Since no retry, should return statuses as they come back. - # - Even if there are retryable errors, no retry attempt is made. 
- # - State of responses_statuses should be - # [success, retryable, non-retryable] - - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_table(self.TABLE_ID, instance) + order_by = None + page_size = 0 + if "order_by" in kwargs: + order_by = kwargs["order_by"] - row_1 = DirectRow(row_key=b"row_key", table=table) - row_1.set_cell("cf", b"col", b"value1") - row_2 = DirectRow(row_key=b"row_key_2", table=table) - row_2.set_cell("cf", b"col", b"value2") - row_3 = DirectRow(row_key=b"row_key_3", table=table) - row_3.set_cell("cf", b"col", b"value3") + if "page_size" in kwargs: + page_size = kwargs["page_size"] - response_codes = [self.SUCCESS, self.RETRYABLE_1, self.NON_RETRYABLE] - response = self._make_responses(response_codes) - data_api.mutate_rows = mock.MagicMock(return_value=[response]) + table_api.list_backups.assert_called_once_with( + request={ + "parent": parent, + "filter": backups_filter, + "order_by": order_by, + "page_size": page_size, + } + ) - table._instance._client._table_data_client = data_api - table._instance._client._table_admin_client = table_api - table._instance._client._table_data_client.mutate_rows.return_value = [response] +def test_table_list_backups_defaults(): + _table_list_backups_helper() - worker = self._make_worker(client, table.name, [row_1, row_2, row_3]) - statuses = worker(retry=None) - result = [status.code for status in statuses] - self.assertEqual(result, response_codes) +def test_table_list_backups_w_options(): + _table_list_backups_helper( + cluster_id="cluster", filter_="filter", order_by="order_by", page_size=10 + ) - data_api.mutate_rows.assert_called_once() - def test_callable_retry(self): - from google.cloud.bigtable.row import DirectRow - from google.cloud.bigtable.table import DEFAULT_RETRY - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - client as bigtable_table_admin, - ) +def _table_restore_helper(backup_name=None): + from google.cloud.bigtable.instance import Instance - # Setup: - # - Mutate 3 rows. - # Action: - # - Initial attempt will mutate all 3 rows. - # Expectation: - # - First attempt will result in one retryable error. - # - Second attempt will result in success for the retry-ed row. - # - Check MutateRows is called twice. 
- # - State of responses_statuses should be - # [success, success, non-retryable] - - data_api = mock.create_autospec(BigtableClient) - table_api = mock.create_autospec(bigtable_table_admin.BigtableTableAdminClient) - - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - client._table_admin_client = table_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_table(self.TABLE_ID, instance) - row_1 = DirectRow(row_key=b"row_key", table=table) - row_1.set_cell("cf", b"col", b"value1") - row_2 = DirectRow(row_key=b"row_key_2", table=table) - row_2.set_cell("cf", b"col", b"value2") - row_3 = DirectRow(row_key=b"row_key_3", table=table) - row_3.set_cell("cf", b"col", b"value3") - - response_1 = self._make_responses( - [self.SUCCESS, self.RETRYABLE_1, self.NON_RETRYABLE] - ) - response_2 = self._make_responses([self.SUCCESS]) + op_future = object() + credentials = _make_credentials() + client = _make_client(project=PROJECT_ID, credentials=credentials, admin=True) - # Patch the stub used by the API method. - client._table_data_client.mutate_rows.side_effect = [[response_1], [response_2]] - table._instance._client._table_data_client = data_api - table._instance._client._table_admin_client = table_api + instance = Instance(INSTANCE_ID, client=client) + table = _make_table(TABLE_ID, instance) - retry = DEFAULT_RETRY.with_delay(initial=0.1) - worker = self._make_worker(client, table.name, [row_1, row_2, row_3]) - statuses = worker(retry=retry) + table_api = client._table_admin_client = _make_table_api() + table_api.restore_table.return_value = op_future - result = [status.code for status in statuses] - expected_result = [self.SUCCESS, self.SUCCESS, self.NON_RETRYABLE] + if backup_name: + future = table.restore(TABLE_ID, backup_name=BACKUP_NAME) + else: + future = table.restore(TABLE_ID, CLUSTER_ID, BACKUP_ID) - self.assertEqual(client._table_data_client.mutate_rows.call_count, 2) - self.assertEqual(result, expected_result) + assert future is op_future - def _do_mutate_retryable_rows_helper( - self, - row_cells, - responses, - prior_statuses=None, - expected_result=None, - raising_retry=False, - retryable_error=False, - timeout=None, - ): - from google.api_core.exceptions import ServiceUnavailable - from google.cloud.bigtable.row import DirectRow - from google.cloud.bigtable.table import _BigtableRetryableError - from google.cloud.bigtable_v2.services.bigtable import BigtableClient - from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2 - - # Setup: - # - Mutate 2 rows. - # Action: - # - Initial attempt will mutate all 2 rows. 
- # Expectation: - # - Expect [success, non-retryable] - - data_api = mock.create_autospec(BigtableClient) - - credentials = _make_credentials() - client = self._make_client( - project="project-id", credentials=credentials, admin=True - ) - client._table_data_client = data_api - instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_table(self.TABLE_ID, instance) - - rows = [] - for row_key, cell_data in row_cells: - row = DirectRow(row_key=row_key, table=table) - row.set_cell(*cell_data) - rows.append(row) + expected_request = { + "parent": INSTANCE_NAME, + "table_id": TABLE_ID, + "backup": BACKUP_NAME, + } + table_api.restore_table.assert_called_once_with(request=expected_request) - response = self._make_responses(responses) - if retryable_error: - data_api.mutate_rows.side_effect = ServiceUnavailable("testing") - else: - data_api.mutate_rows.side_effect = [[response]] +def test_table_restore_table_w_backup_id(): + _table_restore_helper() - worker = self._make_worker(client, table.name, rows=rows) - if prior_statuses is not None: - assert len(prior_statuses) == len(rows) - worker.responses_statuses = self._make_responses_statuses(prior_statuses) - expected_entries = [] - for row, prior_status in zip(rows, worker.responses_statuses): +def test_table_restore_table_w_backup_name(): + _table_restore_helper(backup_name=BACKUP_NAME) - if prior_status is None or prior_status.code in self.RETRYABLES: - mutations = row._get_mutations().copy() # row clears on success - entry = data_messages_v2_pb2.MutateRowsRequest.Entry( - row_key=row.row_key, mutations=mutations, - ) - expected_entries.append(entry) - expected_kwargs = {} - if timeout is not None: - worker.timeout = timeout - expected_kwargs["timeout"] = mock.ANY +def _make_worker(*args, **kwargs): + from google.cloud.bigtable.table import _RetryableMutateRowsWorker - if retryable_error or raising_retry: - with self.assertRaises(_BigtableRetryableError): - worker._do_mutate_retryable_rows() - statuses = worker.responses_statuses - else: - statuses = worker._do_mutate_retryable_rows() + return _RetryableMutateRowsWorker(*args, **kwargs) - if not retryable_error: - result = [status.code for status in statuses] - if expected_result is None: - expected_result = responses +def _make_responses_statuses(codes): + from google.rpc.status_pb2 import Status - self.assertEqual(result, expected_result) + response = [Status(code=code) for code in codes] + return response - if len(responses) == 0 and not retryable_error: - data_api.mutate_rows.assert_not_called() - else: - data_api.mutate_rows.assert_called_once_with( - table_name=table.name, - entries=expected_entries, - app_profile_id=None, - retry=None, - **expected_kwargs, + +def _make_responses(codes): + from google.cloud.bigtable_v2.types.bigtable import MutateRowsResponse + from google.rpc.status_pb2 import Status + + entries = [ + MutateRowsResponse.Entry(index=i, status=Status(code=codes[i])) + for i in range(len(codes)) + ] + return MutateRowsResponse(entries=entries) + + +def test_rmrw_callable_empty_rows(): + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + data_api = client._table_data_client = _make_data_api() + data_api.mutate_rows.return_value = [] + data_api.table_path.return_value = ( + f"projects/{PROJECT_ID}/instances/{INSTANCE_ID}/tables/{TABLE_ID}" + ) + + worker = _make_worker(client, table.name, []) + 
statuses = worker()
+
+ assert len(statuses) == 0
+
+
+def test_rmrw_callable_no_retry_strategy():
+ from google.cloud.bigtable.row import DirectRow
+
+ # Setup:
+ # - Mutate 3 rows.
+ # Action:
+ # - Attempt to mutate the rows w/o any retry strategy.
+ # Expectation:
+ # - Since no retry, should return statuses as they come back.
+ # - Even if there are retryable errors, no retry attempt is made.
+ # - State of responses_statuses should be
+ # [success, retryable, non-retryable]
+ credentials = _make_credentials()
+ client = _make_client(project="project-id", credentials=credentials, admin=True)
+ instance = client.instance(instance_id=INSTANCE_ID)
+ table = _make_table(TABLE_ID, instance)
+
+ row_1 = DirectRow(row_key=b"row_key", table=table)
+ row_1.set_cell("cf", b"col", b"value1")
+ row_2 = DirectRow(row_key=b"row_key_2", table=table)
+ row_2.set_cell("cf", b"col", b"value2")
+ row_3 = DirectRow(row_key=b"row_key_3", table=table)
+ row_3.set_cell("cf", b"col", b"value3")
+
+ response_codes = [SUCCESS, RETRYABLE_1, NON_RETRYABLE]
+ response = _make_responses(response_codes)
+
+ data_api = client._table_data_client = _make_data_api()
+ data_api.mutate_rows.return_value = [response]
+ data_api.table_path.return_value = (
+ f"projects/{PROJECT_ID}/instances/{INSTANCE_ID}/tables/{TABLE_ID}"
+ )
+ worker = _make_worker(client, table.name, [row_1, row_2, row_3])
+
+ statuses = worker(retry=None)
+
+ result = [status.code for status in statuses]
+ assert result == response_codes
+
+ data_api.mutate_rows.assert_called_once()
+
+
+def test_rmrw_callable_retry():
+ from google.cloud.bigtable.row import DirectRow
+ from google.cloud.bigtable.table import DEFAULT_RETRY
+
+ # Setup:
+ # - Mutate 3 rows.
+ # Action:
+ # - Initial attempt will mutate all 3 rows.
+ # Expectation:
+ # - First attempt will result in one retryable error.
+ # - Second attempt will result in success for the retried row.
+ # - Check MutateRows is called twice.
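+ # - Only the row whose status was retryable is re-sent on the
+ # second attempt; the worker re-filters its rows each pass,
+ # roughly ``[r for r, s in zip(rows, statuses)
+ # if s is None or s.code in RETRYABLES]``.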
+ # - State of responses_statuses should be + # [success, success, non-retryable] + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + row_1 = DirectRow(row_key=b"row_key", table=table) + row_1.set_cell("cf", b"col", b"value1") + row_2 = DirectRow(row_key=b"row_key_2", table=table) + row_2.set_cell("cf", b"col", b"value2") + row_3 = DirectRow(row_key=b"row_key_3", table=table) + row_3.set_cell("cf", b"col", b"value3") + + response_1 = _make_responses([SUCCESS, RETRYABLE_1, NON_RETRYABLE]) + response_2 = _make_responses([SUCCESS]) + data_api = client._table_data_client = _make_data_api() + data_api.mutate_rows.side_effect = [[response_1], [response_2]] + data_api.table_path.return_value = ( + f"projects/{PROJECT_ID}/instances/{INSTANCE_ID}/tables/{TABLE_ID}" + ) + worker = _make_worker(client, table.name, [row_1, row_2, row_3]) + retry = DEFAULT_RETRY.with_delay(initial=0.1) + + statuses = worker(retry=retry) + + result = [status.code for status in statuses] + + assert result == [SUCCESS, SUCCESS, NON_RETRYABLE] + + assert client._table_data_client.mutate_rows.call_count == 2 + + +def _do_mutate_retryable_rows_helper( + row_cells, + responses, + prior_statuses=None, + expected_result=None, + raising_retry=False, + retryable_error=False, + timeout=None, +): + from google.api_core.exceptions import ServiceUnavailable + from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable.table import _BigtableRetryableError + from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2 + + # Setup: + # - Mutate 2 rows. + # Action: + # - Initial attempt will mutate all 2 rows. + # Expectation: + # - Expect [success, non-retryable] + + credentials = _make_credentials() + client = _make_client(project="project-id", credentials=credentials, admin=True) + instance = client.instance(instance_id=INSTANCE_ID) + table = _make_table(TABLE_ID, instance) + + rows = [] + for row_key, cell_data in row_cells: + row = DirectRow(row_key=row_key, table=table) + row.set_cell(*cell_data) + rows.append(row) + + response = _make_responses(responses) + + data_api = client._table_data_client = _make_data_api() + if retryable_error: + data_api.mutate_rows.side_effect = ServiceUnavailable("testing") + else: + data_api.mutate_rows.return_value = [response] + + worker = _make_worker(client, table.name, rows=rows) + + if prior_statuses is not None: + assert len(prior_statuses) == len(rows) + worker.responses_statuses = _make_responses_statuses(prior_statuses) + + expected_entries = [] + for row, prior_status in zip(rows, worker.responses_statuses): + + if prior_status is None or prior_status.code in RETRYABLES: + mutations = row._get_mutations().copy() # row clears on success + entry = data_messages_v2_pb2.MutateRowsRequest.Entry( + row_key=row.row_key, mutations=mutations, ) - if timeout is not None: - called = data_api.mutate_rows.mock_calls[0] - self.assertEqual(called.kwargs["timeout"]._deadline, timeout) - - def test_do_mutate_retryable_rows_empty_rows(self): - # - # Setup: - # - No mutated rows. - # Action: - # - No API call made. - # Expectation: - # - No change. - # - row_cells = [] - responses = [] - - self._do_mutate_retryable_rows_helper(row_cells, responses) - - def test_do_mutate_retryable_rows_w_timeout(self): - # - # Setup: - # - Mutate 2 rows. - # Action: - # - Initial attempt will mutate all 2 rows. 
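+ # (The ``.copy()`` above matters: a successful row clears its staged
+ # mutations in place, which would otherwise empty this entry too.)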
- # Expectation:
- # - No retryable error codes, so don't expect a raise.
- # - State of responses_statuses should be [success, non-retryable].
- #
- row_cells = [
- (b"row_key_1", ("cf", b"col", b"value1")),
- (b"row_key_2", ("cf", b"col", b"value2")),
- ]
+ expected_entries.append(entry)

- responses = [self.SUCCESS, self.NON_RETRYABLE]
+ expected_kwargs = {}
+ if timeout is not None:
+ worker.timeout = timeout
+ expected_kwargs["timeout"] = mock.ANY

- timeout = 5 # seconds
+ if retryable_error or raising_retry:
+ with pytest.raises(_BigtableRetryableError):
+ worker._do_mutate_retryable_rows()
+ statuses = worker.responses_statuses
+ else:
+ statuses = worker._do_mutate_retryable_rows()

- self._do_mutate_retryable_rows_helper(
- row_cells, responses, timeout=timeout,
- )
+ if not retryable_error:
+ result = [status.code for status in statuses]

- def test_do_mutate_retryable_rows_w_retryable_error(self):
- #
- # Setup:
- # - Mutate 2 rows.
- # Action:
- # - Initial attempt will mutate all 2 rows.
- # Expectation:
- # - No retryable error codes, so don't expect a raise.
- # - State of responses_statuses should be [success, non-retryable].
- #
- row_cells = [
- (b"row_key_1", ("cf", b"col", b"value1")),
- (b"row_key_2", ("cf", b"col", b"value2")),
- ]
+ if expected_result is None:
+ expected_result = responses

- responses = ()
+ assert result == expected_result

- self._do_mutate_retryable_rows_helper(
- row_cells, responses, retryable_error=True,
+ if len(responses) == 0 and not retryable_error:
+ data_api.mutate_rows.assert_not_called()
+ else:
+ data_api.mutate_rows.assert_called_once_with(
+ table_name=table.name,
+ entries=expected_entries,
+ app_profile_id=None,
+ retry=None,
+ **expected_kwargs,
 )
+ if timeout is not None:
+ called = data_api.mutate_rows.mock_calls[0]
+ assert called.kwargs["timeout"]._deadline == timeout
+
+
+def test_rmrw_do_mutate_retryable_rows_empty_rows():
+ #
+ # Setup:
+ # - No mutated rows.
+ # Action:
+ # - No API call made.
+ # Expectation:
+ # - No change.
+ #
+ row_cells = []
+ responses = []
+
+ _do_mutate_retryable_rows_helper(row_cells, responses)
+
+
+def test_rmrw_do_mutate_retryable_rows_w_timeout():
+ #
+ # Setup:
+ # - Mutate 2 rows.
+ # Action:
+ # - Initial attempt will mutate all 2 rows.
+ # Expectation:
+ # - No retryable error codes, so don't expect a raise.
+ # - State of responses_statuses should be [success, non-retryable].
+ #
+ row_cells = [
+ (b"row_key_1", ("cf", b"col", b"value1")),
+ (b"row_key_2", ("cf", b"col", b"value2")),
+ ]
+
+ responses = [SUCCESS, NON_RETRYABLE]
+
+ timeout = 5 # seconds
+
+ _do_mutate_retryable_rows_helper(
+ row_cells, responses, timeout=timeout,
+ )

- def test_do_mutate_retryable_rows_retry(self):
- #
- # Setup:
- # - Mutate 3 rows.
- # Action:
- # - Initial attempt will mutate all 3 rows.
- # Expectation:
- # - Second row returns retryable error code, so expect a raise.
- # - State of responses_statuses should be
- # [success, retryable, non-retryable]
- #
- row_cells = [
- (b"row_key_1", ("cf", b"col", b"value1")),
- (b"row_key_2", ("cf", b"col", b"value2")),
- (b"row_key_3", ("cf", b"col", b"value3")),
- ]

- responses = [self.SUCCESS, self.RETRYABLE_1, self.NON_RETRYABLE]
+def test_rmrw_do_mutate_retryable_rows_w_retryable_error():
+ #
+ # Setup:
+ # - Mutate 2 rows.
+ # Action:
+ # - Initial attempt will mutate all 2 rows; the request itself
+ # fails with a retryable ``ServiceUnavailable``.
+ # Expectation:
+ # - ``_BigtableRetryableError`` is raised, so the attempt can be
+ # retried as a whole.
+ # - responses_statuses is left unset ([None, None]).
+ # + row_cells = [ + (b"row_key_1", ("cf", b"col", b"value1")), + (b"row_key_2", ("cf", b"col", b"value2")), + ] + + responses = () + + _do_mutate_retryable_rows_helper( + row_cells, responses, retryable_error=True, + ) - self._do_mutate_retryable_rows_helper( - row_cells, responses, raising_retry=True, - ) - def test_do_mutate_retryable_rows_second_retry(self): - # - # Setup: - # - Mutate 4 rows. - # - First try results: - # [success, retryable, non-retryable, retryable] - # Action: - # - Second try should re-attempt the 'retryable' rows. - # Expectation: - # - After second try: - # [success, success, non-retryable, retryable] - # - One of the rows tried second time returns retryable error code, - # so expect a raise. - # - Exception contains response whose index should be '3' even though - # only two rows were retried. - # - row_cells = [ - (b"row_key_1", ("cf", b"col", b"value1")), - (b"row_key_2", ("cf", b"col", b"value2")), - (b"row_key_3", ("cf", b"col", b"value3")), - (b"row_key_4", ("cf", b"col", b"value4")), - ] +def test_rmrw_do_mutate_retryable_rows_retry(): + # + # Setup: + # - Mutate 3 rows. + # Action: + # - Initial attempt will mutate all 3 rows. + # Expectation: + # - Second row returns retryable error code, so expect a raise. + # - State of responses_statuses should be + # [success, retryable, non-retryable] + # + row_cells = [ + (b"row_key_1", ("cf", b"col", b"value1")), + (b"row_key_2", ("cf", b"col", b"value2")), + (b"row_key_3", ("cf", b"col", b"value3")), + ] + + responses = [SUCCESS, RETRYABLE_1, NON_RETRYABLE] + + _do_mutate_retryable_rows_helper( + row_cells, responses, raising_retry=True, + ) - responses = [self.SUCCESS, self.RETRYABLE_1] - prior_statuses = [ - self.SUCCESS, - self.RETRYABLE_1, - self.NON_RETRYABLE, - self.RETRYABLE_2, - ] +def test_rmrw_do_mutate_retryable_rows_second_retry(): + # + # Setup: + # - Mutate 4 rows. + # - First try results: + # [success, retryable, non-retryable, retryable] + # Action: + # - Second try should re-attempt the 'retryable' rows. + # Expectation: + # - After second try: + # [success, success, non-retryable, retryable] + # - One of the rows tried second time returns retryable error code, + # so expect a raise. + # - Exception contains response whose index should be '3' even though + # only two rows were retried. + # + row_cells = [ + (b"row_key_1", ("cf", b"col", b"value1")), + (b"row_key_2", ("cf", b"col", b"value2")), + (b"row_key_3", ("cf", b"col", b"value3")), + (b"row_key_4", ("cf", b"col", b"value4")), + ] + + responses = [SUCCESS, RETRYABLE_1] + + prior_statuses = [ + SUCCESS, + RETRYABLE_1, + NON_RETRYABLE, + RETRYABLE_2, + ] + + expected_result = [ + SUCCESS, + SUCCESS, + NON_RETRYABLE, + RETRYABLE_1, + ] + + _do_mutate_retryable_rows_helper( + row_cells, + responses, + prior_statuses=prior_statuses, + expected_result=expected_result, + raising_retry=True, + ) - expected_result = [ - self.SUCCESS, - self.SUCCESS, - self.NON_RETRYABLE, - self.RETRYABLE_1, - ] - self._do_mutate_retryable_rows_helper( - row_cells, - responses, - prior_statuses=prior_statuses, - expected_result=expected_result, - raising_retry=True, - ) +def test_rmrw_do_mutate_retryable_rows_second_try(): + # + # Setup: + # - Mutate 4 rows. + # - First try results: + # [success, retryable, non-retryable, retryable] + # Action: + # - Second try should re-attempt the 'retryable' rows. 
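+ # (Here that means rows 2 and 4.)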
+ # Expectation: + # - After second try: + # [success, non-retryable, non-retryable, success] + # + row_cells = [ + (b"row_key_1", ("cf", b"col", b"value1")), + (b"row_key_2", ("cf", b"col", b"value2")), + (b"row_key_3", ("cf", b"col", b"value3")), + (b"row_key_4", ("cf", b"col", b"value4")), + ] + + responses = [NON_RETRYABLE, SUCCESS] + + prior_statuses = [ + SUCCESS, + RETRYABLE_1, + NON_RETRYABLE, + RETRYABLE_2, + ] + + expected_result = [ + SUCCESS, + NON_RETRYABLE, + NON_RETRYABLE, + SUCCESS, + ] + + _do_mutate_retryable_rows_helper( + row_cells, + responses, + prior_statuses=prior_statuses, + expected_result=expected_result, + ) - def test_do_mutate_retryable_rows_second_try(self): - # - # Setup: - # - Mutate 4 rows. - # - First try results: - # [success, retryable, non-retryable, retryable] - # Action: - # - Second try should re-attempt the 'retryable' rows. - # Expectation: - # - After second try: - # [success, non-retryable, non-retryable, success] - # - row_cells = [ - (b"row_key_1", ("cf", b"col", b"value1")), - (b"row_key_2", ("cf", b"col", b"value2")), - (b"row_key_3", ("cf", b"col", b"value3")), - (b"row_key_4", ("cf", b"col", b"value4")), - ] - responses = [self.NON_RETRYABLE, self.SUCCESS] +def test_rmrw_do_mutate_retryable_rows_second_try_no_retryable(): + # + # Setup: + # - Mutate 2 rows. + # - First try results: [success, non-retryable] + # Action: + # - Second try has no row to retry. + # Expectation: + # - After second try: [success, non-retryable] + # + row_cells = [ + (b"row_key_1", ("cf", b"col", b"value1")), + (b"row_key_2", ("cf", b"col", b"value2")), + ] + + responses = [] # no calls will be made + + prior_statuses = [ + SUCCESS, + NON_RETRYABLE, + ] + + expected_result = [ + SUCCESS, + NON_RETRYABLE, + ] + + _do_mutate_retryable_rows_helper( + row_cells, + responses, + prior_statuses=prior_statuses, + expected_result=expected_result, + ) - prior_statuses = [ - self.SUCCESS, - self.RETRYABLE_1, - self.NON_RETRYABLE, - self.RETRYABLE_2, - ] - expected_result = [ - self.SUCCESS, - self.NON_RETRYABLE, - self.NON_RETRYABLE, - self.SUCCESS, - ] +def test_rmrw_do_mutate_retryable_rows_mismatch_num_responses(): + row_cells = [ + (b"row_key_1", ("cf", b"col", b"value1")), + (b"row_key_2", ("cf", b"col", b"value2")), + ] - self._do_mutate_retryable_rows_helper( - row_cells, - responses, - prior_statuses=prior_statuses, - expected_result=expected_result, - ) + responses = [SUCCESS] - def test_do_mutate_retryable_rows_second_try_no_retryable(self): - # - # Setup: - # - Mutate 2 rows. - # - First try results: [success, non-retryable] - # Action: - # - Second try has no row to retry. 
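+ # (With nothing left to retry, the helper asserts that
+ # ``mutate_rows`` is not called at all.)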
- # Expectation: - # - After second try: [success, non-retryable] - # - row_cells = [ - (b"row_key_1", ("cf", b"col", b"value1")), - (b"row_key_2", ("cf", b"col", b"value2")), - ] + with pytest.raises(RuntimeError): + _do_mutate_retryable_rows_helper(row_cells, responses) - responses = [] # no calls will be made - prior_statuses = [ - self.SUCCESS, - self.NON_RETRYABLE, - ] +def test__create_row_request_table_name_only(): + from google.cloud.bigtable.table import _create_row_request - expected_result = [ - self.SUCCESS, - self.NON_RETRYABLE, - ] + table_name = "table_name" + result = _create_row_request(table_name) + expected_result = _ReadRowsRequestPB(table_name=table_name) + assert result == expected_result - self._do_mutate_retryable_rows_helper( - row_cells, - responses, - prior_statuses=prior_statuses, - expected_result=expected_result, - ) - def test_do_mutate_retryable_rows_mismatch_num_responses(self): - row_cells = [ - (b"row_key_1", ("cf", b"col", b"value1")), - (b"row_key_2", ("cf", b"col", b"value2")), - ] +def test__create_row_request_row_range_row_set_conflict(): + from google.cloud.bigtable.table import _create_row_request - responses = [self.SUCCESS] + with pytest.raises(ValueError): + _create_row_request(None, end_key=object(), row_set=object()) - with self.assertRaises(RuntimeError): - self._do_mutate_retryable_rows_helper(row_cells, responses) +def test__create_row_request_row_range_start_key(): + from google.cloud.bigtable.table import _create_row_request + from google.cloud.bigtable_v2.types import RowRange -class Test__create_row_request(unittest.TestCase): - def _call_fut( - self, - table_name, - start_key=None, - end_key=None, - filter_=None, - limit=None, - end_inclusive=False, - app_profile_id=None, - row_set=None, - ): + table_name = "table_name" + start_key = b"start_key" + result = _create_row_request(table_name, start_key=start_key) + expected_result = _ReadRowsRequestPB(table_name=table_name) + row_range = RowRange(start_key_closed=start_key) + expected_result.rows.row_ranges.append(row_range) + assert result == expected_result - from google.cloud.bigtable.table import _create_row_request - return _create_row_request( - table_name, - start_key=start_key, - end_key=end_key, - filter_=filter_, - limit=limit, - end_inclusive=end_inclusive, - app_profile_id=app_profile_id, - row_set=row_set, - ) +def test__create_row_request_row_range_end_key(): + from google.cloud.bigtable.table import _create_row_request + from google.cloud.bigtable_v2.types import RowRange - def test_table_name_only(self): - table_name = "table_name" - result = self._call_fut(table_name) - expected_result = _ReadRowsRequestPB(table_name=table_name) - self.assertEqual(result, expected_result) - - def test_row_range_row_set_conflict(self): - with self.assertRaises(ValueError): - self._call_fut(None, end_key=object(), row_set=object()) - - def test_row_range_start_key(self): - from google.cloud.bigtable_v2.types import RowRange - - table_name = "table_name" - start_key = b"start_key" - result = self._call_fut(table_name, start_key=start_key) - expected_result = _ReadRowsRequestPB(table_name=table_name) - row_range = RowRange(start_key_closed=start_key) - expected_result.rows.row_ranges.append(row_range) - self.assertEqual(result, expected_result) - - def test_row_range_end_key(self): - from google.cloud.bigtable_v2.types import RowRange - - table_name = "table_name" - end_key = b"end_key" - result = self._call_fut(table_name, end_key=end_key) - expected_result = 
_ReadRowsRequestPB(table_name=table_name) - row_range = RowRange(end_key_open=end_key) - expected_result.rows.row_ranges.append(row_range) - self.assertEqual(result, expected_result) - - def test_row_range_both_keys(self): - from google.cloud.bigtable_v2.types import RowRange - - table_name = "table_name" - start_key = b"start_key" - end_key = b"end_key" - result = self._call_fut(table_name, start_key=start_key, end_key=end_key) - row_range = RowRange(start_key_closed=start_key, end_key_open=end_key) - expected_result = _ReadRowsRequestPB(table_name=table_name) - expected_result.rows.row_ranges.append(row_range) - self.assertEqual(result, expected_result) - - def test_row_range_both_keys_inclusive(self): - from google.cloud.bigtable_v2.types import RowRange - - table_name = "table_name" - start_key = b"start_key" - end_key = b"end_key" - result = self._call_fut( - table_name, start_key=start_key, end_key=end_key, end_inclusive=True - ) - expected_result = _ReadRowsRequestPB(table_name=table_name) - row_range = RowRange(start_key_closed=start_key, end_key_closed=end_key) - expected_result.rows.row_ranges.append(row_range) - self.assertEqual(result, expected_result) - - def test_with_filter(self): - from google.cloud.bigtable.row_filters import RowSampleFilter - - table_name = "table_name" - row_filter = RowSampleFilter(0.33) - result = self._call_fut(table_name, filter_=row_filter) - expected_result = _ReadRowsRequestPB( - table_name=table_name, filter=row_filter.to_pb() - ) - self.assertEqual(result, expected_result) - - def test_with_limit(self): - table_name = "table_name" - limit = 1337 - result = self._call_fut(table_name, limit=limit) - expected_result = _ReadRowsRequestPB(table_name=table_name, rows_limit=limit) - self.assertEqual(result, expected_result) - - def test_with_row_set(self): - from google.cloud.bigtable.row_set import RowSet - - table_name = "table_name" - row_set = RowSet() - result = self._call_fut(table_name, row_set=row_set) - expected_result = _ReadRowsRequestPB(table_name=table_name) - self.assertEqual(result, expected_result) - - def test_with_app_profile_id(self): - table_name = "table_name" - limit = 1337 - app_profile_id = "app-profile-id" - result = self._call_fut(table_name, limit=limit, app_profile_id=app_profile_id) - expected_result = _ReadRowsRequestPB( - table_name=table_name, rows_limit=limit, app_profile_id=app_profile_id - ) - self.assertEqual(result, expected_result) + table_name = "table_name" + end_key = b"end_key" + result = _create_row_request(table_name, end_key=end_key) + expected_result = _ReadRowsRequestPB(table_name=table_name) + row_range = RowRange(end_key_open=end_key) + expected_result.rows.row_ranges.append(row_range) + assert result == expected_result + + +def test__create_row_request_row_range_both_keys(): + from google.cloud.bigtable.table import _create_row_request + from google.cloud.bigtable_v2.types import RowRange + + table_name = "table_name" + start_key = b"start_key" + end_key = b"end_key" + result = _create_row_request(table_name, start_key=start_key, end_key=end_key) + row_range = RowRange(start_key_closed=start_key, end_key_open=end_key) + expected_result = _ReadRowsRequestPB(table_name=table_name) + expected_result.rows.row_ranges.append(row_range) + assert result == expected_result + + +def test__create_row_request_row_range_both_keys_inclusive(): + from google.cloud.bigtable.table import _create_row_request + from google.cloud.bigtable_v2.types import RowRange + + table_name = "table_name" + start_key = b"start_key" + 
+    result = _create_row_request(
+        table_name, start_key=start_key, end_key=end_key, end_inclusive=True
+    )
+    expected_result = _ReadRowsRequestPB(table_name=table_name)
+    row_range = RowRange(start_key_closed=start_key, end_key_closed=end_key)
+    expected_result.rows.row_ranges.append(row_range)
+    assert result == expected_result
+
+
+def test__create_row_request_with_filter():
+    from google.cloud.bigtable.table import _create_row_request
+    from google.cloud.bigtable.row_filters import RowSampleFilter
+
+    table_name = "table_name"
+    row_filter = RowSampleFilter(0.33)
+    result = _create_row_request(table_name, filter_=row_filter)
+    expected_result = _ReadRowsRequestPB(
+        table_name=table_name, filter=row_filter.to_pb()
+    )
+    assert result == expected_result
+
+
+def test__create_row_request_with_limit():
+    from google.cloud.bigtable.table import _create_row_request
+
+    table_name = "table_name"
+    limit = 1337
+    result = _create_row_request(table_name, limit=limit)
+    expected_result = _ReadRowsRequestPB(table_name=table_name, rows_limit=limit)
+    assert result == expected_result
+
+
+def test__create_row_request_with_row_set():
+    from google.cloud.bigtable.table import _create_row_request
+    from google.cloud.bigtable.row_set import RowSet
+
+    table_name = "table_name"
+    row_set = RowSet()
+    result = _create_row_request(table_name, row_set=row_set)
+    expected_result = _ReadRowsRequestPB(table_name=table_name)
+    assert result == expected_result
+
+
+def test__create_row_request_with_app_profile_id():
+    from google.cloud.bigtable.table import _create_row_request
+
+    table_name = "table_name"
+    limit = 1337
+    app_profile_id = "app-profile-id"
+    result = _create_row_request(table_name, limit=limit, app_profile_id=app_profile_id)
+    expected_result = _ReadRowsRequestPB(
+        table_name=table_name, rows_limit=limit, app_profile_id=app_profile_id
+    )
+    assert result == expected_result


 def _ReadRowsRequestPB(*args, **kw):
@@ -2169,90 +2011,83 @@ def _ReadRowsRequestPB(*args, **kw):
     return messages_v2_pb2.ReadRowsRequest(*args, **kw)


-class Test_ClusterState(unittest.TestCase):
-    def test___eq__(self):
-        from google.cloud.bigtable.enums import Table as enum_table
-        from google.cloud.bigtable.table import ClusterState
-
-        READY = enum_table.ReplicationState.READY
-        state1 = ClusterState(READY)
-        state2 = ClusterState(READY)
-        self.assertEqual(state1, state2)
-
-    def test___eq__type_differ(self):
-        from google.cloud.bigtable.enums import Table as enum_table
-        from google.cloud.bigtable.table import ClusterState
-
-        READY = enum_table.ReplicationState.READY
-        state1 = ClusterState(READY)
-        state2 = object()
-        self.assertNotEqual(state1, state2)
-
-    def test___ne__same_value(self):
-        from google.cloud.bigtable.enums import Table as enum_table
-        from google.cloud.bigtable.table import ClusterState
-
-        READY = enum_table.ReplicationState.READY
-        state1 = ClusterState(READY)
-        state2 = ClusterState(READY)
-        comparison_val = state1 != state2
-        self.assertFalse(comparison_val)
-
-    def test___ne__(self):
-        from google.cloud.bigtable.enums import Table as enum_table
-        from google.cloud.bigtable.table import ClusterState
-
-        READY = enum_table.ReplicationState.READY
-        INITIALIZING = enum_table.ReplicationState.INITIALIZING
-        state1 = ClusterState(READY)
-        state2 = ClusterState(INITIALIZING)
-        self.assertNotEqual(state1, state2)
-
-    def test__repr__(self):
-        from google.cloud.bigtable.enums import Table as enum_table
-        from google.cloud.bigtable.table import ClusterState
-
-        STATE_NOT_KNOWN = enum_table.ReplicationState.STATE_NOT_KNOWN
-        INITIALIZING = enum_table.ReplicationState.INITIALIZING
-        PLANNED_MAINTENANCE = enum_table.ReplicationState.PLANNED_MAINTENANCE
-        UNPLANNED_MAINTENANCE = enum_table.ReplicationState.UNPLANNED_MAINTENANCE
-        READY = enum_table.ReplicationState.READY
-
-        replication_dict = {
-            STATE_NOT_KNOWN: "STATE_NOT_KNOWN",
-            INITIALIZING: "INITIALIZING",
-            PLANNED_MAINTENANCE: "PLANNED_MAINTENANCE",
-            UNPLANNED_MAINTENANCE: "UNPLANNED_MAINTENANCE",
-            READY: "READY",
-        }
+def test_cluster_state___eq__():
+    from google.cloud.bigtable.enums import Table as enum_table
+    from google.cloud.bigtable.table import ClusterState

-        self.assertEqual(
-            str(ClusterState(STATE_NOT_KNOWN)), replication_dict[STATE_NOT_KNOWN]
-        )
-        self.assertEqual(
-            str(ClusterState(INITIALIZING)), replication_dict[INITIALIZING]
-        )
-        self.assertEqual(
-            str(ClusterState(PLANNED_MAINTENANCE)),
-            replication_dict[PLANNED_MAINTENANCE],
-        )
-        self.assertEqual(
-            str(ClusterState(UNPLANNED_MAINTENANCE)),
-            replication_dict[UNPLANNED_MAINTENANCE],
-        )
-        self.assertEqual(str(ClusterState(READY)), replication_dict[READY])
+    READY = enum_table.ReplicationState.READY
+    state1 = ClusterState(READY)
+    state2 = ClusterState(READY)
+    assert state1 == state2

-        self.assertEqual(
-            ClusterState(STATE_NOT_KNOWN).replication_state, STATE_NOT_KNOWN
-        )
-        self.assertEqual(ClusterState(INITIALIZING).replication_state, INITIALIZING)
-        self.assertEqual(
-            ClusterState(PLANNED_MAINTENANCE).replication_state, PLANNED_MAINTENANCE
-        )
-        self.assertEqual(
-            ClusterState(UNPLANNED_MAINTENANCE).replication_state, UNPLANNED_MAINTENANCE
-        )
-        self.assertEqual(ClusterState(READY).replication_state, READY)
+
+def test_cluster_state___eq__type_differ():
+    from google.cloud.bigtable.enums import Table as enum_table
+    from google.cloud.bigtable.table import ClusterState
+
+    READY = enum_table.ReplicationState.READY
+    state1 = ClusterState(READY)
+    state2 = object()
+    assert not (state1 == state2)
+
+
+def test_cluster_state___ne__same_value():
+    from google.cloud.bigtable.enums import Table as enum_table
+    from google.cloud.bigtable.table import ClusterState
+
+    READY = enum_table.ReplicationState.READY
+    state1 = ClusterState(READY)
+    state2 = ClusterState(READY)
+    assert not (state1 != state2)
+
+
+def test_cluster_state___ne__():
+    from google.cloud.bigtable.enums import Table as enum_table
+    from google.cloud.bigtable.table import ClusterState
+
+    READY = enum_table.ReplicationState.READY
+    INITIALIZING = enum_table.ReplicationState.INITIALIZING
+    state1 = ClusterState(READY)
+    state2 = ClusterState(INITIALIZING)
+    assert state1 != state2
+
+
+def test_cluster_state__repr__():
+    from google.cloud.bigtable.enums import Table as enum_table
+    from google.cloud.bigtable.table import ClusterState
+
+    STATE_NOT_KNOWN = enum_table.ReplicationState.STATE_NOT_KNOWN
+    INITIALIZING = enum_table.ReplicationState.INITIALIZING
+    PLANNED_MAINTENANCE = enum_table.ReplicationState.PLANNED_MAINTENANCE
+    UNPLANNED_MAINTENANCE = enum_table.ReplicationState.UNPLANNED_MAINTENANCE
+    READY = enum_table.ReplicationState.READY
+
+    replication_dict = {
+        STATE_NOT_KNOWN: "STATE_NOT_KNOWN",
+        INITIALIZING: "INITIALIZING",
+        PLANNED_MAINTENANCE: "PLANNED_MAINTENANCE",
+        UNPLANNED_MAINTENANCE: "UNPLANNED_MAINTENANCE",
+        READY: "READY",
+    }
+
+    assert str(ClusterState(STATE_NOT_KNOWN)) == replication_dict[STATE_NOT_KNOWN]
+    assert str(ClusterState(INITIALIZING)) == replication_dict[INITIALIZING]
+    assert (
+        str(ClusterState(PLANNED_MAINTENANCE)) == replication_dict[PLANNED_MAINTENANCE]
+    )
+    assert (
+        str(ClusterState(UNPLANNED_MAINTENANCE))
+        == replication_dict[UNPLANNED_MAINTENANCE]
+    )
+    assert str(ClusterState(READY)) == replication_dict[READY]
+
+    assert ClusterState(STATE_NOT_KNOWN).replication_state == STATE_NOT_KNOWN
+    assert ClusterState(INITIALIZING).replication_state == INITIALIZING
+    assert ClusterState(PLANNED_MAINTENANCE).replication_state == PLANNED_MAINTENANCE
+    assert (
+        ClusterState(UNPLANNED_MAINTENANCE).replication_state == UNPLANNED_MAINTENANCE
+    )
+    assert ClusterState(READY).replication_state == READY


 def _ReadRowsResponseCellChunkPB(*args, **kw):
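Aside on the conversion recipe: every hunk in [PATCH 01/16] above applies the same mechanical transformation. `unittest.TestCase` methods become module-level functions, `self.assertEqual(...)` and `self.assertRaises(...)` become bare `assert` statements and `pytest.raises(...)`, and per-class helpers such as `_call_fut` are inlined as direct calls, with imports moved into each test body. The sketch below illustrates the recipe on a made-up `Widget` class; it is a minimal stand-in, not part of google-cloud-bigtable.

# Hypothetical example, for illustration only (not part of google-cloud-bigtable).
import pytest


class Widget:
    def __init__(self, size):
        if size < 0:
            raise ValueError("size must be non-negative")
        self.size = size


# Before (unittest idiom):
#
#     class TestWidget(unittest.TestCase):
#         def _call_fut(self, size):
#             return Widget(size)
#
#         def test_size(self):
#             self.assertEqual(self._call_fut(3).size, 3)
#
#         def test_negative_size(self):
#             with self.assertRaises(ValueError):
#                 self._call_fut(-1)

# After (pytest idiom, as applied throughout the patch above):


def test_widget_size():
    assert Widget(3).size == 3


def test_widget_negative_size():
    with pytest.raises(ValueError):
        Widget(-1)

Plain functions with bare asserts let pytest's assertion rewriting report the compared values on failure, which is the main payoff of the conversion.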
From 78f23633d35f5457580d8b23173b1831570e7645 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Mon, 25 Oct 2021 10:24:37 -0400
Subject: [PATCH 16/16] tests: restore coverage for '__ne__' methods

---
 tests/unit/test_row_filters.py | 105 +++++++++++++++++++++++++++++++++
 1 file changed, 105 insertions(+)

diff --git a/tests/unit/test_row_filters.py b/tests/unit/test_row_filters.py
index 1fc3110ef..8c591e03c 100644
--- a/tests/unit/test_row_filters.py
+++ b/tests/unit/test_row_filters.py
@@ -160,6 +160,16 @@ def test_row_sample_filter___eq__same_value():
     assert row_filter1 == row_filter2


+def test_row_sample_filter___ne__():
+    from google.cloud.bigtable.row_filters import RowSampleFilter
+
+    sample = object()
+    other_sample = object()
+    row_filter1 = RowSampleFilter(sample)
+    row_filter2 = RowSampleFilter(other_sample)
+    assert row_filter1 != row_filter2
+
+
 def test_row_sample_filter_to_pb():
     from google.cloud.bigtable.row_filters import RowSampleFilter

@@ -306,6 +316,16 @@ def test_timestamp_range_filter___eq__same_value():
     assert row_filter1 == row_filter2


+def test_timestamp_range_filter___ne__():
+    from google.cloud.bigtable.row_filters import TimestampRangeFilter
+
+    range_ = object()
+    other_range_ = object()
+    row_filter1 = TimestampRangeFilter(range_)
+    row_filter2 = TimestampRangeFilter(other_range_)
+    assert row_filter1 != row_filter2
+
+
 def test_timestamp_range_filter_to_pb():
     from google.cloud.bigtable.row_filters import TimestampRangeFilter
     from google.cloud.bigtable.row_filters import TimestampRange
@@ -401,6 +421,32 @@ def test_column_range_filter___eq__type_differ():
     assert not (row_filter1 == row_filter2)


+def test_column_range_filter___ne__():
+    from google.cloud.bigtable.row_filters import ColumnRangeFilter
+
+    column_family_id = object()
+    other_column_family_id = object()
+    start_column = object()
+    end_column = object()
+    inclusive_start = object()
+    inclusive_end = object()
+    row_filter1 = ColumnRangeFilter(
+        column_family_id,
+        start_column=start_column,
+        end_column=end_column,
+        inclusive_start=inclusive_start,
+        inclusive_end=inclusive_end,
+    )
+    row_filter2 = ColumnRangeFilter(
+        other_column_family_id,
+        start_column=start_column,
+        end_column=end_column,
+        inclusive_start=inclusive_start,
+        inclusive_end=inclusive_end,
+    )
+    assert row_filter1 != row_filter2
+
+
 def test_column_range_filter_to_pb():
     from google.cloud.bigtable.row_filters import ColumnRangeFilter

@@ -615,6 +661,29 @@ def test_value_range_filter___eq__type_differ():
     assert not (row_filter1 == row_filter2)


+def test_value_range_filter___ne__():
+    from google.cloud.bigtable.row_filters import ValueRangeFilter
+
+    start_value = object()
+    other_start_value = object()
+    end_value = object()
+    inclusive_start = object()
+    inclusive_end = object()
+    row_filter1 = ValueRangeFilter(
+        start_value=start_value,
+        end_value=end_value,
+        inclusive_start=inclusive_start,
+        inclusive_end=inclusive_end,
+    )
+    row_filter2 = ValueRangeFilter(
+        start_value=other_start_value,
+        end_value=end_value,
+        inclusive_start=inclusive_start,
+        inclusive_end=inclusive_end,
+    )
+    assert row_filter1 != row_filter2
+
+
 def test_value_range_filter_to_pb():
     from google.cloud.bigtable.row_filters import ValueRangeFilter

@@ -764,6 +833,16 @@ def test_apply_label_filter___eq__same_value():
     assert row_filter1 == row_filter2


+def test_apply_label_filter___ne__():
+    from google.cloud.bigtable.row_filters import ApplyLabelFilter
+
+    label = object()
+    other_label = object()
+    row_filter1 = ApplyLabelFilter(label)
+    row_filter2 = ApplyLabelFilter(other_label)
+    assert row_filter1 != row_filter2
+
+
 def test_apply_label_filter_to_pb():
     from google.cloud.bigtable.row_filters import ApplyLabelFilter

@@ -807,6 +886,16 @@ def test_filter_combination___eq__type_differ():
     assert not (row_filter1 == row_filter2)


+def test_filter_combination___ne__():
+    from google.cloud.bigtable.row_filters import _FilterCombination
+
+    filters = object()
+    other_filters = object()
+    row_filter1 = _FilterCombination(filters=filters)
+    row_filter2 = _FilterCombination(filters=other_filters)
+    assert row_filter1 != row_filter2
+
+
 def test_row_filter_chain_to_pb():
     from google.cloud.bigtable.row_filters import RowFilterChain
     from google.cloud.bigtable.row_filters import RowSampleFilter
@@ -937,6 +1026,22 @@ def test_conditional_row_filter___eq__type_differ():
     assert not (cond_filter1 == cond_filter2)


+def test_conditional_row_filter___ne__():
+    from google.cloud.bigtable.row_filters import ConditionalRowFilter
+
+    base_filter = object()
+    other_base_filter = object()
+    true_filter = object()
+    false_filter = object()
+    cond_filter1 = ConditionalRowFilter(
+        base_filter, true_filter=true_filter, false_filter=false_filter
+    )
+    cond_filter2 = ConditionalRowFilter(
+        other_base_filter, true_filter=true_filter, false_filter=false_filter
+    )
+    assert cond_filter1 != cond_filter2
+
+
 def test_conditional_row_filter_to_pb():
     from google.cloud.bigtable.row_filters import ConditionalRowFilter
     from google.cloud.bigtable.row_filters import CellsRowOffsetFilter
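A note on why [PATCH 16/16] is needed: the row-filter classes define `__ne__` explicitly, so tests that only compare instances with `==` never execute those `__ne__` bodies, and the refactored modules had dropped the `!=` assertions (hence "restore coverage"). The sketch below uses a hypothetical `SampleFilter` stand-in, not the real class, and assumes the filters implement `__ne__` as the negation of `__eq__`, consistent with the tests added above; only a direct `!=` comparison reaches that method.

# Stand-in for a row-filter class; assumes the real classes define __ne__
# explicitly instead of relying on Python 3's default negation of __eq__.
class SampleFilter:
    def __init__(self, sample):
        self.sample = sample

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return other.sample == self.sample

    def __ne__(self, other):
        # Only a '!=' comparison reaches this line; '==' based tests do not,
        # which is why the patch adds explicit '__ne__' tests for coverage.
        return not self == other


def test_sample_filter___ne__():
    # Distinct sentinel objects compare unequal, exercising __ne__ directly.
    assert SampleFilter(object()) != SampleFilter(object())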