 # See the License for the specific language governing permissions and
 # limitations under the License.

-import uuid
-
 import pytest

@@ -72,48 +70,11 @@ def test_table_to_dataframe(capsys, clients):
     assert "country_name" in out


-@pytest.fixture
-def temporary_dataset(clients):
-    from google.cloud import bigquery
-
-    bqclient, _ = clients
-
-    # [START bigquerystorage_pandas_tutorial_all]
-    # [START bigquerystorage_pandas_tutorial_create_dataset]
-    # Set the dataset_id to the dataset used to store temporary results.
-    dataset_id = "query_results_dataset"
-    # [END bigquerystorage_pandas_tutorial_create_dataset]
-    # [END bigquerystorage_pandas_tutorial_all]
-
-    dataset_id = "bqstorage_to_dataset_{}".format(uuid.uuid4().hex)
-
-    # [START bigquerystorage_pandas_tutorial_all]
-    # [START bigquerystorage_pandas_tutorial_create_dataset]
-    dataset_ref = bqclient.dataset(dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-
-    # Remove tables after 24 hours.
-    dataset.default_table_expiration_ms = 1000 * 60 * 60 * 24
-
-    bqclient.create_dataset(dataset)  # API request.
-    # [END bigquerystorage_pandas_tutorial_create_dataset]
-    # [END bigquerystorage_pandas_tutorial_all]
-    yield dataset_ref
-    # [START bigquerystorage_pandas_tutorial_cleanup]
-    bqclient.delete_dataset(dataset_ref, delete_contents=True)
-    # [END bigquerystorage_pandas_tutorial_cleanup]
-
-
-def test_query_to_dataframe(capsys, clients, temporary_dataset):
-    from google.cloud import bigquery
-
+def test_query_to_dataframe(capsys, clients):
     bqclient, bqstorageclient = clients
-    dataset_ref = temporary_dataset

     # [START bigquerystorage_pandas_tutorial_all]
     # [START bigquerystorage_pandas_tutorial_read_query_results]
-    import uuid
-
     # Download query results.
     query_string = """
     SELECT
@@ -125,19 +86,15 @@ def test_query_to_dataframe(capsys, clients, temporary_dataset):
     WHERE tags like '%google-bigquery%'
     ORDER BY view_count DESC
     """
-    # Use a random table name to avoid overwriting existing tables.
-    table_id = "queryresults_" + uuid.uuid4().hex
-    table = dataset_ref.table(table_id)
-    query_config = bigquery.QueryJobConfig(
-        # Due to a known issue in the BigQuery Storage API, small query result
-        # sets cannot be downloaded. To work around this issue, write results to
-        # a destination table.
-        destination=table
-    )

     dataframe = (
-        bqclient.query(query_string, job_config=query_config)
+        bqclient.query(query_string)
         .result()
+
+        # Note: The BigQuery Storage API cannot be used to download small query
+        # results, but as of google-cloud-bigquery version 1.11.1, the
+        # to_dataframe method will fall back to the tabledata.list API when the
+        # BigQuery Storage API fails to read the query results.
         .to_dataframe(bqstorage_client=bqstorageclient)
     )
     print(dataframe.head())
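
For reference, the simplified pattern this commit leaves behind can be run standalone, along the lines of the minimal sketch below. The client construction and the FROM/SELECT portion of the query are assumptions (the test receives its clients from a `clients` fixture, and the diff truncates the SELECT list); it also assumes google-cloud-bigquery >= 1.11.1 for the tabledata.list fallback noted in the added comment.

    from google.cloud import bigquery
    from google.cloud import bigquery_storage_v1beta1

    # Assumed client setup; in the test these come from the `clients` fixture.
    bqclient = bigquery.Client()
    bqstorageclient = bigquery_storage_v1beta1.BigQueryStorageClient()

    # Illustrative query; only the WHERE and ORDER BY clauses appear in the
    # diff, so the SELECT list and table are reconstructed guesses.
    query_string = """
    SELECT id, view_count
    FROM `bigquery-public-data.stackoverflow.posts_questions`
    WHERE tags like '%google-bigquery%'
    ORDER BY view_count DESC
    """

    # No temporary dataset or destination table is needed: for small result
    # sets, to_dataframe falls back to the tabledata.list API when the
    # BigQuery Storage API cannot read the query results.
    dataframe = (
        bqclient.query(query_string)
        .result()
        .to_dataframe(bqstorage_client=bqstorageclient)
    )
    print(dataframe.head())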