diff --git a/tests/system/test_client.py b/tests/system/test_client.py
index d52cb9eb9..a00193788 100644
--- a/tests/system/test_client.py
+++ b/tests/system/test_client.py
@@ -1552,7 +1552,7 @@ def test_dbapi_connection_does_not_leak_sockets(self):
 
         connection.close()
         conn_count_end = len(current_process.connections())
-        self.assertEqual(conn_count_end, conn_count_start)
+        self.assertLessEqual(conn_count_end, conn_count_start)
 
     def _load_table_for_dml(self, rows, dataset_id, table_id):
         from google.cloud._testing import _NamedTemporaryFile
diff --git a/tests/unit/job/test_query_pandas.py b/tests/unit/job/test_query_pandas.py
index 044ca6e9a..e35051c5c 100644
--- a/tests/unit/job/test_query_pandas.py
+++ b/tests/unit/job/test_query_pandas.py
@@ -41,7 +41,6 @@
 except (ImportError, AttributeError):  # pragma: NO COVER
     tqdm = None
 
-from google.cloud.bigquery._helpers import BQ_STORAGE_VERSIONS
 from ..helpers import make_connection
 from .helpers import _make_client
 from .helpers import _make_job_resource
@@ -142,18 +141,22 @@ def test_to_dataframe_bqstorage_preserve_order(query, table_read_options_kwarg):
     session = bigquery_storage.types.ReadSession()
     session.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes()
     session.streams = [bigquery_storage.types.ReadStream(name=stream_id)]
-    bqstorage_client.create_read_session.return_value = session
-    bqstorage_base_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
-    page = bigquery_storage.types.ReadRowsResponse()
-    if BQ_STORAGE_VERSIONS.is_read_session_optional:
-        page.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes()
-    page.arrow_record_batch.serialized_record_batch = (
-        record_batch.serialize().to_pybytes()
+    reader = mock.create_autospec(
+        google.cloud.bigquery_storage_v1.reader.ReadRowsStream, instance=True
+    )
+    row_iterable = mock.create_autospec(
+        google.cloud.bigquery_storage_v1.reader.ReadRowsIterable, instance=True
+    )
+    page = mock.create_autospec(
+        google.cloud.bigquery_storage_v1.reader.ReadRowsPage, instance=True
     )
-    bqstorage_base_client.read_rows.return_value = [page]
-    reader = google.cloud.bigquery_storage_v1.reader.ReadRowsStream(
-        [page], bqstorage_base_client, stream_id, 0, {}
+    page.to_arrow.return_value = record_batch
+    type(row_iterable).pages = mock.PropertyMock(return_value=[page])
+    reader.rows.return_value = row_iterable
+    bqstorage_client = mock.create_autospec(
+        bigquery_storage.BigQueryReadClient, instance=True
     )
+    bqstorage_client.create_read_session.return_value = session
     bqstorage_client.read_rows.return_value = reader
 
     dataframe = job.to_dataframe(bqstorage_client=bqstorage_client)
@@ -536,22 +539,25 @@ def test_to_dataframe_bqstorage(table_read_options_kwarg):
     connection = make_connection(query_resource)
     client = _make_client(connection=connection)
     job = target_class.from_api_repr(resource, client)
-    bqstorage_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
     session = bigquery_storage.types.ReadSession()
     session.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes()
     session.streams = [bigquery_storage.types.ReadStream(name=stream_id)]
-    bqstorage_client.create_read_session.return_value = session
-    bqstorage_base_client = mock.create_autospec(bigquery_storage.BigQueryReadClient)
-    page = bigquery_storage.types.ReadRowsResponse()
-    if BQ_STORAGE_VERSIONS.is_read_session_optional:
-        page.arrow_schema.serialized_schema = arrow_schema.serialize().to_pybytes()
-    page.arrow_record_batch.serialized_record_batch = (
-        record_batch.serialize().to_pybytes()
+    reader = mock.create_autospec(
+        google.cloud.bigquery_storage_v1.reader.ReadRowsStream, instance=True
+    )
+    row_iterable = mock.create_autospec(
+        google.cloud.bigquery_storage_v1.reader.ReadRowsIterable, instance=True
     )
-    bqstorage_base_client.read_rows.return_value = [page]
-    reader = google.cloud.bigquery_storage_v1.reader.ReadRowsStream(
-        [page], bqstorage_base_client, stream_id, 0, {}
+    page = mock.create_autospec(
+        google.cloud.bigquery_storage_v1.reader.ReadRowsPage, instance=True
     )
+    page.to_arrow.return_value = record_batch
+    type(row_iterable).pages = mock.PropertyMock(return_value=[page])
+    reader.rows.return_value = row_iterable
+    bqstorage_client = mock.create_autospec(
+        bigquery_storage.BigQueryReadClient, instance=True
+    )
+    bqstorage_client.create_read_session.return_value = session
     bqstorage_client.read_rows.return_value = reader
 
     dataframe = job.to_dataframe(bqstorage_client=bqstorage_client)
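
Both unit tests now build their BigQuery Storage mocks the same way, so the shared pattern is worth spelling out once. The sketch below is a minimal, self-contained rendering of that setup, not code from the patch itself: the helper name make_mock_bqstorage_client and its session/record_batch parameters are hypothetical stand-ins for the fixtures each test constructs inline.

from unittest import mock

import google.cloud.bigquery_storage_v1.reader
from google.cloud import bigquery_storage


def make_mock_bqstorage_client(session, record_batch):
    # Hypothetical helper mirroring the setup both tests repeat.
    # Autospec the reader objects instead of calling the real
    # ReadRowsStream constructor, whose private positional signature is
    # what the removed BQ_STORAGE_VERSIONS branching had to work around.
    reader = mock.create_autospec(
        google.cloud.bigquery_storage_v1.reader.ReadRowsStream, instance=True
    )
    row_iterable = mock.create_autospec(
        google.cloud.bigquery_storage_v1.reader.ReadRowsIterable, instance=True
    )
    page = mock.create_autospec(
        google.cloud.bigquery_storage_v1.reader.ReadRowsPage, instance=True
    )
    # Each page hands back a prebuilt pyarrow RecordBatch directly, so the
    # test no longer serializes Arrow bytes into ReadRowsResponse protos.
    page.to_arrow.return_value = record_batch
    # `pages` is a property on ReadRowsIterable, so it is stubbed via
    # PropertyMock on the mock's type rather than by plain assignment.
    type(row_iterable).pages = mock.PropertyMock(return_value=[page])
    reader.rows.return_value = row_iterable

    bqstorage_client = mock.create_autospec(
        bigquery_storage.BigQueryReadClient, instance=True
    )
    bqstorage_client.create_read_session.return_value = session
    bqstorage_client.read_rows.return_value = reader
    return bqstorage_client

With a client built this way, job.to_dataframe(bqstorage_client=...) exercises the storage-API code path while the tests stay insulated from changes to the reader internals across google-cloud-bigquery-storage releases.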