@@ -26,8 +26,12 @@
 
 try:
     from google.cloud import bigquery_storage_v1beta1
+    from google.cloud.bigquery_storage_v1beta1.gapic.transports import (
+        big_query_storage_grpc_transport,
+    )
 except ImportError:  # pragma: NO COVER
     bigquery_storage_v1beta1 = None
+    big_query_storage_grpc_transport = None
 
 try:
     import pandas
@@ -1817,6 +1821,9 @@ def test_to_arrow_w_bqstorage(self):
         bqstorage_client = mock.create_autospec(
             bigquery_storage_v1beta1.BigQueryStorageClient
         )
+        bqstorage_client.transport = mock.create_autospec(
+            big_query_storage_grpc_transport.BigQueryStorageGrpcTransport
+        )
         streams = [
             # Use two streams we want to check frames are read from each stream.
             {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"},
@@ -1882,6 +1889,9 @@ def test_to_arrow_w_bqstorage(self):
         total_rows = expected_num_rows * total_pages
         self.assertEqual(actual_tbl.num_rows, total_rows)
 
+        # Don't close the client if it was passed in.
+        bqstorage_client.transport.channel.close.assert_not_called()
+
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     @unittest.skipIf(
         bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
@@ -1894,6 +1904,9 @@ def test_to_arrow_w_bqstorage_creates_client(self):
         bqstorage_client = mock.create_autospec(
             bigquery_storage_v1beta1.BigQueryStorageClient
         )
+        bqstorage_client.transport = mock.create_autospec(
+            big_query_storage_grpc_transport.BigQueryStorageGrpcTransport
+        )
         mock_client._create_bqstorage_client.return_value = bqstorage_client
         session = bigquery_storage_v1beta1.types.ReadSession()
         bqstorage_client.create_read_session.return_value = session
@@ -1910,6 +1923,7 @@ def test_to_arrow_w_bqstorage_creates_client(self):
         )
         row_iterator.to_arrow(create_bqstorage_client=True)
         mock_client._create_bqstorage_client.assert_called_once()
+        bqstorage_client.transport.channel.close.assert_called_once()
 
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     @unittest.skipIf(
@@ -2321,6 +2335,43 @@ def test_to_dataframe_max_results_w_bqstorage_warning(self):
         ]
         self.assertEqual(len(matches), 1, msg="User warning was not emitted.")
 
+    @unittest.skipIf(pandas is None, "Requires `pandas`")
+    def test_to_dataframe_max_results_w_create_bqstorage_warning(self):
+        from google.cloud.bigquery.schema import SchemaField
+
+        schema = [
+            SchemaField("name", "STRING", mode="REQUIRED"),
+            SchemaField("age", "INTEGER", mode="REQUIRED"),
+        ]
+        rows = [
+            {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]},
+            {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]},
+        ]
+        path = "/foo"
+        api_request = mock.Mock(return_value={"rows": rows})
+        mock_client = _mock_client()
+
+        row_iterator = self._make_one(
+            client=mock_client,
+            api_request=api_request,
+            path=path,
+            schema=schema,
+            max_results=42,
+        )
+
+        with warnings.catch_warnings(record=True) as warned:
+            row_iterator.to_dataframe(create_bqstorage_client=True)
+
+        matches = [
+            warning
+            for warning in warned
+            if warning.category is UserWarning
+            and "cannot use bqstorage_client" in str(warning).lower()
+            and "tabledata.list" in str(warning)
+        ]
+        self.assertEqual(len(matches), 1, msg="User warning was not emitted.")
+        mock_client._create_bqstorage_client.assert_not_called()
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(
         bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
@@ -2333,6 +2384,9 @@ def test_to_dataframe_w_bqstorage_creates_client(self):
         bqstorage_client = mock.create_autospec(
             bigquery_storage_v1beta1.BigQueryStorageClient
         )
+        bqstorage_client.transport = mock.create_autospec(
+            big_query_storage_grpc_transport.BigQueryStorageGrpcTransport
+        )
         mock_client._create_bqstorage_client.return_value = bqstorage_client
         session = bigquery_storage_v1beta1.types.ReadSession()
         bqstorage_client.create_read_session.return_value = session
@@ -2349,6 +2403,7 @@ def test_to_dataframe_w_bqstorage_creates_client(self):
         )
         row_iterator.to_dataframe(create_bqstorage_client=True)
         mock_client._create_bqstorage_client.assert_called_once()
+        bqstorage_client.transport.channel.close.assert_called_once()
 
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(
@@ -2485,6 +2540,9 @@ def test_to_dataframe_w_bqstorage_nonempty(self):
         bqstorage_client = mock.create_autospec(
             bigquery_storage_v1beta1.BigQueryStorageClient
         )
+        bqstorage_client.transport = mock.create_autospec(
+            big_query_storage_grpc_transport.BigQueryStorageGrpcTransport
+        )
         streams = [
             # Use two streams we want to check frames are read from each stream.
             {"name": "/projects/proj/dataset/dset/tables/tbl/streams/1234"},
@@ -2539,6 +2597,9 @@ def test_to_dataframe_w_bqstorage_nonempty(self):
         total_rows = len(page_items) * total_pages
         self.assertEqual(len(got.index), total_rows)
 
+        # Don't close the client if it was passed in.
+        bqstorage_client.transport.channel.close.assert_not_called()
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(
         bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
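
For context, a minimal standalone sketch of the mocking pattern the hunks above repeat: autospec is applied to both the storage client and its gRPC transport, giving the tests a concrete transport.channel mock to assert against. This snippet is illustrative only, not part of the commit.

import mock

from google.cloud import bigquery_storage_v1beta1
from google.cloud.bigquery_storage_v1beta1.gapic.transports import (
    big_query_storage_grpc_transport,
)

# Autospec the client, then attach an explicitly autospec'd transport so
# that transport.channel.close exists as an assertable mock method.
bqstorage_client = mock.create_autospec(
    bigquery_storage_v1beta1.BigQueryStorageClient
)
bqstorage_client.transport = mock.create_autospec(
    big_query_storage_grpc_transport.BigQueryStorageGrpcTransport
)

# Caller-owned client: to_arrow()/to_dataframe() must leave it open.
bqstorage_client.transport.channel.close.assert_not_called()

# For a client the iterator creates itself, the tests instead expect
# cleanup via bqstorage_client.transport.channel.close.assert_called_once().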