From a7142a2b43023b5f864e83469f2e31e6adcc2cee Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 12 Jul 2019 13:22:54 -0500 Subject: [PATCH 001/338] Update quickstart sample with data format and sharding options. (#8665) * Update quickstart sample with data format and sharding options. * Add symlink in docs to samples. Use localdeps for system and unit tests. --- bigquery_storage/__init__.py | 0 bigquery_storage/quickstart.py | 100 ++++++++++++++++++++++ bigquery_storage/requirements.txt | 1 + bigquery_storage/tests/__init__.py | 0 bigquery_storage/tests/quickstart_test.py | 44 ++++++++++ 5 files changed, 145 insertions(+) create mode 100644 bigquery_storage/__init__.py create mode 100644 bigquery_storage/quickstart.py create mode 100644 bigquery_storage/requirements.txt create mode 100644 bigquery_storage/tests/__init__.py create mode 100644 bigquery_storage/tests/quickstart_test.py diff --git a/bigquery_storage/__init__.py b/bigquery_storage/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/bigquery_storage/quickstart.py b/bigquery_storage/quickstart.py new file mode 100644 index 00000000000..8a3b8617b38 --- /dev/null +++ b/bigquery_storage/quickstart.py @@ -0,0 +1,100 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse + + +def main(project_id="your-project-id", snapshot_millis=0): + # [START bigquerystorage_quickstart] + from google.cloud import bigquery_storage_v1beta1 + + # TODO(developer): Set the project_id variable. + # project_id = 'your-project-id' + # + # The read session is created in this project. This project can be + # different from that which contains the table. + + client = bigquery_storage_v1beta1.BigQueryStorageClient() + + # This example reads baby name data from the public datasets. + table_ref = bigquery_storage_v1beta1.types.TableReference() + table_ref.project_id = "bigquery-public-data" + table_ref.dataset_id = "usa_names" + table_ref.table_id = "usa_1910_current" + + # We limit the output columns to a subset of those allowed in the table, + # and set a simple filter to only report names from the state of + # Washington (WA). + read_options = bigquery_storage_v1beta1.types.TableReadOptions() + read_options.selected_fields.append("name") + read_options.selected_fields.append("number") + read_options.selected_fields.append("state") + read_options.row_restriction = 'state = "WA"' + + # Set a snapshot time if it's been specified. + modifiers = None + if snapshot_millis > 0: + modifiers = bigquery_storage_v1beta1.types.TableModifiers() + modifiers.snapshot_time.FromMilliseconds(snapshot_millis) + + parent = "projects/{}".format(project_id) + session = client.create_read_session( + table_ref, + parent, + table_modifiers=modifiers, + read_options=read_options, + # This API can also deliver data serialized in Apache Arrow format. + # This example leverages Apache Avro. 
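+ # (Aside: swapping this argument for
+ # format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW would request
+ # Arrow-serialized blocks instead, to be decoded with pyarrow rather
+ # than fastavro. A sketch, not part of the original sample.)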
+ format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO, + # We use a LIQUID strategy in this example because we only read from a + # single stream. Consider BALANCED if you're consuming multiple streams + # concurrently and want more consistent stream sizes. + sharding_strategy=(bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID), + ) # API request. + + # We'll use only a single stream for reading data from the table. Because + # of dynamic sharding, this will yield all the rows in the table. However, + # if you wanted to fan out multiple readers you could do so by having a + # reader process each individual stream. + reader = client.read_rows( + bigquery_storage_v1beta1.types.StreamPosition(stream=session.streams[0]) + ) + + # The read stream contains blocks of Avro-encoded bytes. The rows() method + # uses the fastavro library to parse these blocks as an interable of Python + # dictionaries. Install fastavro with the following command: + # + # pip install google-cloud-bigquery-storage[fastavro] + rows = reader.rows(session) + + # Do any local processing by iterating over the rows. The + # google-cloud-bigquery-storage client reconnects to the API after any + # transient network errors or timeouts. + names = set() + states = set() + + for row in rows: + names.add(row["name"]) + states.add(row["state"]) + + print("Got {} unique names in states: {}".format(len(names), states)) + # [END bigquerystorage_quickstart] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("project_id") + parser.add_argument("--snapshot_millis", default=0, type=int) + args = parser.parse_args() + main(project_id=args.project_id) diff --git a/bigquery_storage/requirements.txt b/bigquery_storage/requirements.txt new file mode 100644 index 00000000000..acd0800e713 --- /dev/null +++ b/bigquery_storage/requirements.txt @@ -0,0 +1 @@ +fastavro \ No newline at end of file diff --git a/bigquery_storage/tests/__init__.py b/bigquery_storage/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/bigquery_storage/tests/quickstart_test.py b/bigquery_storage/tests/quickstart_test.py new file mode 100644 index 00000000000..fde039f4620 --- /dev/null +++ b/bigquery_storage/tests/quickstart_test.py @@ -0,0 +1,44 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os + +import pytest + +from .. 
import quickstart + + +def now_millis(): + return int( + (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds() + * 1000 + ) + + +@pytest.fixture() +def project_id(): + return os.environ["PROJECT_ID"] + + +def test_quickstart_wo_snapshot(capsys, project_id): + quickstart.main(project_id) + out, _ = capsys.readouterr() + assert "WA" in out + + +def test_quickstart_with_snapshot(capsys, project_id): + quickstart.main(project_id, now_millis() - 5000) + out, _ = capsys.readouterr() + assert "WA" in out From b38671551ba7aad37641f6b7ecff7f593708a5f7 Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Tue, 3 Mar 2020 09:29:34 -0800 Subject: [PATCH 002/338] fix: pass snapshot_millis to the main function (#8) Co-authored-by: Tim Swast --- bigquery_storage/quickstart.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/quickstart.py b/bigquery_storage/quickstart.py index 8a3b8617b38..192039846da 100644 --- a/bigquery_storage/quickstart.py +++ b/bigquery_storage/quickstart.py @@ -97,4 +97,4 @@ def main(project_id="your-project-id", snapshot_millis=0): parser.add_argument("project_id") parser.add_argument("--snapshot_millis", default=0, type=int) args = parser.parse_args() - main(project_id=args.project_id) + main(project_id=args.project_id, snapshot_millis=args.snapshot_millis) From 582b4b89abbce48e6b7865dbbc0538c20adca3b6 Mon Sep 17 00:00:00 2001 From: shollyman Date: Tue, 3 Mar 2020 16:34:47 -0800 Subject: [PATCH 003/338] chore: update quickstart to use v1 endpoint (#19) * chore: update quickstart to leverage v1 endpoint --- bigquery_storage/quickstart.py | 55 +++++++++++++++------------------- 1 file changed, 24 insertions(+), 31 deletions(-) diff --git a/bigquery_storage/quickstart.py b/bigquery_storage/quickstart.py index 192039846da..89a2448d28f 100644 --- a/bigquery_storage/quickstart.py +++ b/bigquery_storage/quickstart.py @@ -17,7 +17,7 @@ def main(project_id="your-project-id", snapshot_millis=0): # [START bigquerystorage_quickstart] - from google.cloud import bigquery_storage_v1beta1 + from google.cloud import bigquery_storage_v1 # TODO(developer): Set the project_id variable. # project_id = 'your-project-id' @@ -25,51 +25,44 @@ def main(project_id="your-project-id", snapshot_millis=0): # The read session is created in this project. This project can be # different from that which contains the table. - client = bigquery_storage_v1beta1.BigQueryStorageClient() + client = bigquery_storage_v1.BigQueryReadClient() # This example reads baby name data from the public datasets. - table_ref = bigquery_storage_v1beta1.types.TableReference() - table_ref.project_id = "bigquery-public-data" - table_ref.dataset_id = "usa_names" - table_ref.table_id = "usa_1910_current" + table = "projects/{}/datasets/{}/tables/{}".format( + "bigquery-public-data", "usa_names", "usa_1910_current" + ) + + requested_session = bigquery_storage_v1.types.ReadSession() + requested_session.table = table + # This API can also deliver data serialized in Apache Arrow format. + # This example leverages Apache Avro. + requested_session.data_format = bigquery_storage_v1.enums.DataFormat.AVRO # We limit the output columns to a subset of those allowed in the table, # and set a simple filter to only report names from the state of # Washington (WA). 
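+ # (Note the v1 shape: read options now live on the ReadSession request
+ # itself rather than being passed to create_read_session separately,
+ # as the replaced lines below show.)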
- read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.selected_fields.append("name") - read_options.selected_fields.append("number") - read_options.selected_fields.append("state") - read_options.row_restriction = 'state = "WA"' + requested_session.read_options.selected_fields.append("name") + requested_session.read_options.selected_fields.append("number") + requested_session.read_options.selected_fields.append("state") + requested_session.read_options.row_restriction = 'state = "WA"' # Set a snapshot time if it's been specified. modifiers = None if snapshot_millis > 0: - modifiers = bigquery_storage_v1beta1.types.TableModifiers() - modifiers.snapshot_time.FromMilliseconds(snapshot_millis) + requested_session.table_modifiers.snapshot_time.FromMilliseconds( + snapshot_millis + ) parent = "projects/{}".format(project_id) session = client.create_read_session( - table_ref, parent, - table_modifiers=modifiers, - read_options=read_options, - # This API can also deliver data serialized in Apache Arrow format. - # This example leverages Apache Avro. - format_=bigquery_storage_v1beta1.enums.DataFormat.AVRO, - # We use a LIQUID strategy in this example because we only read from a - # single stream. Consider BALANCED if you're consuming multiple streams - # concurrently and want more consistent stream sizes. - sharding_strategy=(bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID), - ) # API request. - - # We'll use only a single stream for reading data from the table. Because - # of dynamic sharding, this will yield all the rows in the table. However, - # if you wanted to fan out multiple readers you could do so by having a - # reader process each individual stream. - reader = client.read_rows( - bigquery_storage_v1beta1.types.StreamPosition(stream=session.streams[0]) + requested_session, + # We'll use only a single stream for reading data from the table. However, + # if you wanted to fan out multiple readers you could do so by having a + # reader process each individual stream. + max_stream_count=1, ) + reader = client.read_rows(session.streams[0].name) # The read stream contains blocks of Avro-encoded bytes. The rows() method # uses the fastavro library to parse these blocks as an interable of Python From 224e93f83293bb42c824be6689323369b08c7234 Mon Sep 17 00:00:00 2001 From: shollyman Date: Wed, 15 Apr 2020 10:11:21 -0700 Subject: [PATCH 004/338] docs: fix typo in storage quickstart comment (#23) * docs: fix typo in storage quickstart comment --- bigquery_storage/quickstart.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/quickstart.py b/bigquery_storage/quickstart.py index 89a2448d28f..8358fdc1336 100644 --- a/bigquery_storage/quickstart.py +++ b/bigquery_storage/quickstart.py @@ -65,7 +65,7 @@ def main(project_id="your-project-id", snapshot_millis=0): reader = client.read_rows(session.streams[0].name) # The read stream contains blocks of Avro-encoded bytes. The rows() method - # uses the fastavro library to parse these blocks as an interable of Python + # uses the fastavro library to parse these blocks as an iterable of Python # dictionaries. 
Install fastavro with the following command: # # pip install google-cloud-bigquery-storage[fastavro] From 81bb6f7c0830fa3db16b2dd174ee7238cbec25fa Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 7 Feb 2019 10:53:05 -0800 Subject: [PATCH 005/338] BigQuery Storage API sample for reading pandas dataframe [(#1994)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1994) * BigQuery Storage API sample for reading pandas dataframe How to get a pandas DataFrame, fast! The first two examples use the existing BigQuery client. These examples create a thread pool and read in parallel. The final example shows using just the new BigQuery Storage client, but only shows how to read with a single thread. --- bigquery_storage/to_dataframe/__init__.py | 0 bigquery_storage/to_dataframe/main_test.py | 190 ++++++++++++++++++ .../to_dataframe/requirements.txt | 5 + 3 files changed, 195 insertions(+) create mode 100644 bigquery_storage/to_dataframe/__init__.py create mode 100644 bigquery_storage/to_dataframe/main_test.py create mode 100644 bigquery_storage/to_dataframe/requirements.txt diff --git a/bigquery_storage/to_dataframe/__init__.py b/bigquery_storage/to_dataframe/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py new file mode 100644 index 00000000000..053bd778918 --- /dev/null +++ b/bigquery_storage/to_dataframe/main_test.py @@ -0,0 +1,190 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid + +import pytest + + +@pytest.fixture +def clients(): + # [START bigquerystorage_pandas_tutorial_all] + # [START bigquerystorage_pandas_tutorial_create_client] + import google.auth + from google.cloud import bigquery + from google.cloud import bigquery_storage_v1beta1 + + # Explicitly create a credentials object. This allows you to use the same + # credentials for both the BigQuery and BigQuery Storage clients, avoiding + # unnecessary API calls to fetch duplicate authentication tokens. + credentials, your_project_id = google.auth.default( + scopes=["https://www.googleapis.com/auth/cloud-platform"] + ) + + # Make clients. + bqclient = bigquery.Client( + credentials=credentials, + project=your_project_id + ) + bqstorageclient = bigquery_storage_v1beta1.BigQueryStorageClient( + credentials=credentials + ) + # [END bigquerystorage_pandas_tutorial_create_client] + # [END bigquerystorage_pandas_tutorial_all] + return bqclient, bqstorageclient + + +def test_table_to_dataframe(capsys, clients): + from google.cloud import bigquery + + bqclient, bqstorageclient = clients + + # [START bigquerystorage_pandas_tutorial_all] + # [START bigquerystorage_pandas_tutorial_read_table] + # Download a table. 
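+ # (Passing bqstorage_client to to_dataframe below is what routes the
+ # download through the BigQuery Storage API rather than tabledata.list.)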
+ table = bigquery.TableReference.from_string( + "bigquery-public-data.utility_us.country_code_iso" + ) + rows = bqclient.list_rows( + table, + selected_fields=[ + bigquery.SchemaField("country_name", "STRING"), + bigquery.SchemaField("fips_code", "STRING"), + ], + ) + dataframe = rows.to_dataframe(bqstorage_client=bqstorageclient) + print(dataframe.head()) + # [END bigquerystorage_pandas_tutorial_read_table] + # [END bigquerystorage_pandas_tutorial_all] + + out, _ = capsys.readouterr() + assert "country_name" in out + + +@pytest.fixture +def temporary_dataset(clients): + from google.cloud import bigquery + + bqclient, _ = clients + + # [START bigquerystorage_pandas_tutorial_all] + # [START bigquerystorage_pandas_tutorial_create_dataset] + # Set the dataset_id to the dataset used to store temporary results. + dataset_id = "query_results_dataset" + # [END bigquerystorage_pandas_tutorial_create_dataset] + # [END bigquerystorage_pandas_tutorial_all] + + dataset_id = "bqstorage_to_dataset_{}".format(uuid.uuid4().hex) + + # [START bigquerystorage_pandas_tutorial_all] + # [START bigquerystorage_pandas_tutorial_create_dataset] + dataset_ref = bqclient.dataset(dataset_id) + dataset = bigquery.Dataset(dataset_ref) + + # Remove tables after 24 hours. + dataset.default_table_expiration_ms = 1000 * 60 * 60 * 24 + + bqclient.create_dataset(dataset) # API request. + # [END bigquerystorage_pandas_tutorial_create_dataset] + # [END bigquerystorage_pandas_tutorial_all] + yield dataset_ref + # [START bigquerystorage_pandas_tutorial_cleanup] + bqclient.delete_dataset(dataset_ref, delete_contents=True) + # [END bigquerystorage_pandas_tutorial_cleanup] + + +def test_query_to_dataframe(capsys, clients, temporary_dataset): + from google.cloud import bigquery + + bqclient, bqstorageclient = clients + dataset_ref = temporary_dataset + + # [START bigquerystorage_pandas_tutorial_all] + # [START bigquerystorage_pandas_tutorial_read_query_results] + import uuid + + # Download query results. + query_string = """ + SELECT + CONCAT( + 'https://stackoverflow.com/questions/', + CAST(id as STRING)) as url, + view_count + FROM `bigquery-public-data.stackoverflow.posts_questions` + WHERE tags like '%google-bigquery%' + ORDER BY view_count DESC + """ + # Use a random table name to avoid overwriting existing tables. + table_id = "queryresults_" + uuid.uuid4().hex + table = dataset_ref.table(table_id) + query_config = bigquery.QueryJobConfig( + # Due to a known issue in the BigQuery Storage API, small query result + # sets cannot be downloaded. To workaround this issue, write results to + # a destination table. + destination=table + ) + + dataframe = ( + bqclient.query(query_string, job_config=query_config) + .result() + .to_dataframe(bqstorage_client=bqstorageclient) + ) + print(dataframe.head()) + # [END bigquerystorage_pandas_tutorial_read_query_results] + # [END bigquerystorage_pandas_tutorial_all] + + out, _ = capsys.readouterr() + assert "stackoverflow" in out + + +def test_session_to_dataframe(capsys, clients): + from google.cloud import bigquery_storage_v1beta1 + + bqclient, bqstorageclient = clients + your_project_id = bqclient.project + + # [START bigquerystorage_pandas_tutorial_all] + # [START bigquerystorage_pandas_tutorial_read_session] + table = bigquery_storage_v1beta1.types.TableReference() + table.project_id = "bigquery-public-data" + table.dataset_id = "new_york_trees" + table.table_id = "tree_species" + + # Select columns to read with read options. If no read options are + # specified, the whole table is read. 
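+ # (A row_restriction filter could also be set here, as in the quickstart
+ # sample above.)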
+ read_options = bigquery_storage_v1beta1.types.TableReadOptions() + read_options.selected_fields.append("species_common_name") + read_options.selected_fields.append("fall_color") + + parent = "projects/{}".format(your_project_id) + session = bqstorageclient.create_read_session( + table, parent, read_options=read_options + ) + + # This example reads from only a single stream. Read from multiple streams + # to fetch data faster. Note that the session may not contain any streams + # if there are no rows to read. + stream = session.streams[0] + position = bigquery_storage_v1beta1.types.StreamPosition(stream=stream) + reader = bqstorageclient.read_rows(position) + + # Parse all Avro blocks and create a dataframe. This call requires a + # session, because the session contains the schema for the row blocks. + dataframe = reader.to_dataframe(session) + print(dataframe.head()) + # [END bigquerystorage_pandas_tutorial_read_session] + # [END bigquerystorage_pandas_tutorial_all] + + out, _ = capsys.readouterr() + assert "species_common_name" in out diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt new file mode 100644 index 00000000000..29d1de558f0 --- /dev/null +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -0,0 +1,5 @@ +google-auth==1.6.2 +google-cloud-bigquery-storage==0.2.0 +google-cloud-bigquery==1.8.1 +fastavro==0.21.17 +pandas==0.24.0 \ No newline at end of file From 7ec201f4905acb063b09aafcf9888832524ad3a4 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 4 Apr 2019 12:29:03 -0700 Subject: [PATCH 006/338] Remove temporary dataset from bqstorage pandas tutorial [(#2088)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2088) * Remove temporary dataset from bqstorage pandas tutorial As of google-cloud-bigquery version 1.11.1, the `to_dataframe` method will fallback to the tabledata.list API when the BigQuery Storage API fails to read the query results. * Remove unused imports --- bigquery_storage/to_dataframe/main_test.py | 57 +++---------------- .../to_dataframe/requirements.txt | 4 +- 2 files changed, 9 insertions(+), 52 deletions(-) diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py index 053bd778918..586ab3f94e9 100644 --- a/bigquery_storage/to_dataframe/main_test.py +++ b/bigquery_storage/to_dataframe/main_test.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import uuid - import pytest @@ -72,48 +70,11 @@ def test_table_to_dataframe(capsys, clients): assert "country_name" in out -@pytest.fixture -def temporary_dataset(clients): - from google.cloud import bigquery - - bqclient, _ = clients - - # [START bigquerystorage_pandas_tutorial_all] - # [START bigquerystorage_pandas_tutorial_create_dataset] - # Set the dataset_id to the dataset used to store temporary results. - dataset_id = "query_results_dataset" - # [END bigquerystorage_pandas_tutorial_create_dataset] - # [END bigquerystorage_pandas_tutorial_all] - - dataset_id = "bqstorage_to_dataset_{}".format(uuid.uuid4().hex) - - # [START bigquerystorage_pandas_tutorial_all] - # [START bigquerystorage_pandas_tutorial_create_dataset] - dataset_ref = bqclient.dataset(dataset_id) - dataset = bigquery.Dataset(dataset_ref) - - # Remove tables after 24 hours. - dataset.default_table_expiration_ms = 1000 * 60 * 60 * 24 - - bqclient.create_dataset(dataset) # API request. 
- # [END bigquerystorage_pandas_tutorial_create_dataset] - # [END bigquerystorage_pandas_tutorial_all] - yield dataset_ref - # [START bigquerystorage_pandas_tutorial_cleanup] - bqclient.delete_dataset(dataset_ref, delete_contents=True) - # [END bigquerystorage_pandas_tutorial_cleanup] - - -def test_query_to_dataframe(capsys, clients, temporary_dataset): - from google.cloud import bigquery - +def test_query_to_dataframe(capsys, clients): bqclient, bqstorageclient = clients - dataset_ref = temporary_dataset # [START bigquerystorage_pandas_tutorial_all] # [START bigquerystorage_pandas_tutorial_read_query_results] - import uuid - # Download query results. query_string = """ SELECT @@ -125,19 +86,15 @@ def test_query_to_dataframe(capsys, clients, temporary_dataset): WHERE tags like '%google-bigquery%' ORDER BY view_count DESC """ - # Use a random table name to avoid overwriting existing tables. - table_id = "queryresults_" + uuid.uuid4().hex - table = dataset_ref.table(table_id) - query_config = bigquery.QueryJobConfig( - # Due to a known issue in the BigQuery Storage API, small query result - # sets cannot be downloaded. To workaround this issue, write results to - # a destination table. - destination=table - ) dataframe = ( - bqclient.query(query_string, job_config=query_config) + bqclient.query(query_string) .result() + + # Note: The BigQuery Storage API cannot be used to download small query + # results, but as of google-cloud-bigquery version 1.11.1, the + # to_dataframe method will fallback to the tabledata.list API when the + # BigQuery Storage API fails to read the query results. .to_dataframe(bqstorage_client=bqstorageclient) ) print(dataframe.head()) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 29d1de558f0..24b2a54697c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.6.2 -google-cloud-bigquery-storage==0.2.0 -google-cloud-bigquery==1.8.1 +google-cloud-bigquery-storage==0.3.0 +google-cloud-bigquery==1.11.1 fastavro==0.21.17 pandas==0.24.0 \ No newline at end of file From af94873419037906d2b84ba5221a8f39a405fd2d Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2019 14:13:10 -0700 Subject: [PATCH 007/338] Add magics tutorial with BigQuery Storage API integration. [(#2087)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2087) * Add magics tutorial with BigQuery Storage API integration. This is a notebooks tutorial, modeled after the Jupyter notebook example code for BigQuery. Use some caution when running these tests, as they run some large-ish (5 GB processed) queries and download about 500 MB worth of data. This is intentional, as the BigQuery Storage API is most useful for downloading large results. * Update deps. * Don't run big queries on Travis. --- bigquery_storage/to_dataframe/jupyter_test.py | 148 ++++++++++++++++++ .../to_dataframe/requirements.txt | 1 + 2 files changed, 149 insertions(+) create mode 100644 bigquery_storage/to_dataframe/jupyter_test.py diff --git a/bigquery_storage/to_dataframe/jupyter_test.py b/bigquery_storage/to_dataframe/jupyter_test.py new file mode 100644 index 00000000000..027d0c7d958 --- /dev/null +++ b/bigquery_storage/to_dataframe/jupyter_test.py @@ -0,0 +1,148 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import IPython +from IPython.terminal import interactiveshell +from IPython.testing import tools +import pytest + +# Ignore semicolon lint warning because semicolons are used in notebooks +# flake8: noqa E703 + + +@pytest.fixture(scope="session") +def ipython(): + config = tools.default_config() + config.TerminalInteractiveShell.simple_prompt = True + shell = interactiveshell.TerminalInteractiveShell.instance(config=config) + return shell + + +@pytest.fixture() +def ipython_interactive(request, ipython): + """Activate IPython's builtin hooks + + for the duration of the test scope. + """ + with ipython.builtin_trap: + yield ipython + + +def _strip_region_tags(sample_text): + """Remove blank lines and region tags from sample text""" + magic_lines = [ + line for line in sample_text.split("\n") if len(line) > 0 and "# [" not in line + ] + return "\n".join(magic_lines) + + +def test_jupyter_small_query(ipython): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + + # Include a small query to demonstrate that it falls back to the + # tabledata.list API when the BQ Storage API cannot be used. + sample = """ + # [START bigquerystorage_jupyter_tutorial_fallback] + %%bigquery stackoverflow --use_bqstorage_api + SELECT + CONCAT( + 'https://stackoverflow.com/questions/', + CAST(id as STRING)) as url, + view_count + FROM `bigquery-public-data.stackoverflow.posts_questions` + WHERE tags like '%google-bigquery%' + ORDER BY view_count DESC + LIMIT 10 + # [END bigquerystorage_jupyter_tutorial_fallback] + """ + + result = ip.run_cell(_strip_region_tags(sample)) + result.raise_error() # Throws an exception if the cell failed. + assert "stackoverflow" in ip.user_ns # verify that variable exists + + +@pytest.mark.skipif( + "TRAVIS" in os.environ, reason="Not running long-running queries on Travis" +) +def test_jupyter_tutorial(ipython): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + + # This code sample intentionally queries a lot of data to demonstrate the + # speed-up of using the BigQuery Storage API to download the results. + sample = """ + # [START bigquerystorage_jupyter_tutorial_query] + %%bigquery nodejs_deps --use_bqstorage_api + SELECT + dependency_name, + dependency_platform, + project_name, + project_id, + version_number, + version_id, + dependency_kind, + optional_dependency, + dependency_requirements, + dependency_project_id + FROM + `bigquery-public-data.libraries_io.dependencies` + WHERE + LOWER(dependency_platform) = 'npm' + LIMIT 2500000 + # [END bigquerystorage_jupyter_tutorial_query] + """ + result = ip.run_cell(_strip_region_tags(sample)) + result.raise_error() # Throws an exception if the cell failed. 
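+ # (run_cell executes the %%bigquery cell magic; the query result is
+ # bound in ip.user_ns under the variable name given after the magic.)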
+ + assert "nodejs_deps" in ip.user_ns # verify that variable exists + nodejs_deps = ip.user_ns["nodejs_deps"] + + # [START bigquerystorage_jupyter_tutorial_results] + nodejs_deps.head() + # [END bigquerystorage_jupyter_tutorial_results] + + # [START bigquerystorage_jupyter_tutorial_context] + import google.cloud.bigquery.magics + + google.cloud.bigquery.magics.context.use_bqstorage_api = True + # [END bigquerystorage_jupyter_tutorial_context] + + sample = """ + # [START bigquerystorage_jupyter_tutorial_query] + %%bigquery java_deps + SELECT + dependency_name, + dependency_platform, + project_name, + project_id, + version_number, + version_id, + dependency_kind, + optional_dependency, + dependency_requirements, + dependency_project_id + FROM + `bigquery-public-data.libraries_io.dependencies` + WHERE + LOWER(dependency_platform) = 'maven' + LIMIT 2500000 + # [END bigquerystorage_jupyter_tutorial_query] + """ + result = ip.run_cell(_strip_region_tags(sample)) + result.raise_error() # Throws an exception if the cell failed. + + assert "java_deps" in ip.user_ns # verify that variable exists diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 24b2a54697c..2fab885032f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -2,4 +2,5 @@ google-auth==1.6.2 google-cloud-bigquery-storage==0.3.0 google-cloud-bigquery==1.11.1 fastavro==0.21.17 +ipython==7.2.0 pandas==0.24.0 \ No newline at end of file From 16512b336f4cf061b11eeb20bcbe8b25a9373994 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 9 Apr 2019 14:59:16 -0700 Subject: [PATCH 008/338] Add bigquerystorage_jupyter_tutorial_query_default region tag. --- bigquery_storage/to_dataframe/jupyter_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/jupyter_test.py b/bigquery_storage/to_dataframe/jupyter_test.py index 027d0c7d958..ef1b0ddb74f 100644 --- a/bigquery_storage/to_dataframe/jupyter_test.py +++ b/bigquery_storage/to_dataframe/jupyter_test.py @@ -122,7 +122,7 @@ def test_jupyter_tutorial(ipython): # [END bigquerystorage_jupyter_tutorial_context] sample = """ - # [START bigquerystorage_jupyter_tutorial_query] + # [START bigquerystorage_jupyter_tutorial_query_default] %%bigquery java_deps SELECT dependency_name, @@ -140,7 +140,7 @@ def test_jupyter_tutorial(ipython): WHERE LOWER(dependency_platform) = 'maven' LIMIT 2500000 - # [END bigquerystorage_jupyter_tutorial_query] + # [END bigquerystorage_jupyter_tutorial_query_default] """ result = ip.run_cell(_strip_region_tags(sample)) result.raise_error() # Throws an exception if the cell failed. From 42fe8a4a7f1524cfc00086ac1ea55e58f3df3bc3 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 12 Jul 2019 12:37:21 -0500 Subject: [PATCH 009/338] BigQuery Storage: Update to use faster Arrow data format. [(#2269)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2269) --- bigquery_storage/to_dataframe/main_test.py | 20 ++++++++++++------- .../to_dataframe/requirements.txt | 8 ++++---- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py index 586ab3f94e9..8335b437063 100644 --- a/bigquery_storage/to_dataframe/main_test.py +++ b/bigquery_storage/to_dataframe/main_test.py @@ -33,7 +33,7 @@ def clients(): # Make clients. 
bqclient = bigquery.Client( credentials=credentials, - project=your_project_id + project=your_project_id, ) bqstorageclient = bigquery_storage_v1beta1.BigQueryStorageClient( credentials=credentials @@ -90,11 +90,6 @@ def test_query_to_dataframe(capsys, clients): dataframe = ( bqclient.query(query_string) .result() - - # Note: The BigQuery Storage API cannot be used to download small query - # results, but as of google-cloud-bigquery version 1.11.1, the - # to_dataframe method will fallback to the tabledata.list API when the - # BigQuery Storage API fails to read the query results. .to_dataframe(bqstorage_client=bqstorageclient) ) print(dataframe.head()) @@ -126,7 +121,18 @@ def test_session_to_dataframe(capsys, clients): parent = "projects/{}".format(your_project_id) session = bqstorageclient.create_read_session( - table, parent, read_options=read_options + table, + parent, + read_options=read_options, + # This API can also deliver data serialized in Apache Avro format. + # This example leverages Apache Arrow. + format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW, + # We use a LIQUID strategy in this example because we only read from a + # single stream. Consider BALANCED if you're consuming multiple streams + # concurrently and want more consistent stream sizes. + sharding_strategy=( + bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID + ), ) # This example reads from only a single stream. Read from multiple streams diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2fab885032f..5dad9dad47d 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.6.2 -google-cloud-bigquery-storage==0.3.0 -google-cloud-bigquery==1.11.1 -fastavro==0.21.17 +google-cloud-bigquery-storage==0.6.0 +google-cloud-bigquery==1.17.0 +pyarrow==0.13.0 ipython==7.2.0 -pandas==0.24.0 \ No newline at end of file +pandas==0.24.2 \ No newline at end of file From a2317a4a49d74692ec125ae32d877d3f438965a6 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 24 Sep 2019 15:55:51 -0700 Subject: [PATCH 010/338] Update pandas/bqstorage samples to latest library changes. [(#2413)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2413) --- bigquery_storage/to_dataframe/jupyter_test.py | 49 +++---------------- .../to_dataframe/requirements.txt | 8 +-- 2 files changed, 12 insertions(+), 45 deletions(-) diff --git a/bigquery_storage/to_dataframe/jupyter_test.py b/bigquery_storage/to_dataframe/jupyter_test.py index ef1b0ddb74f..7997ee2eac1 100644 --- a/bigquery_storage/to_dataframe/jupyter_test.py +++ b/bigquery_storage/to_dataframe/jupyter_test.py @@ -75,9 +75,6 @@ def test_jupyter_small_query(ipython): assert "stackoverflow" in ip.user_ns # verify that variable exists -@pytest.mark.skipif( - "TRAVIS" in os.environ, reason="Not running long-running queries on Travis" -) def test_jupyter_tutorial(ipython): ip = IPython.get_ipython() ip.extension_manager.load_extension("google.cloud.bigquery") @@ -86,33 +83,18 @@ def test_jupyter_tutorial(ipython): # speed-up of using the BigQuery Storage API to download the results. 
sample = """ # [START bigquerystorage_jupyter_tutorial_query] - %%bigquery nodejs_deps --use_bqstorage_api - SELECT - dependency_name, - dependency_platform, - project_name, - project_id, - version_number, - version_id, - dependency_kind, - optional_dependency, - dependency_requirements, - dependency_project_id - FROM - `bigquery-public-data.libraries_io.dependencies` - WHERE - LOWER(dependency_platform) = 'npm' - LIMIT 2500000 + %%bigquery tax_forms --use_bqstorage_api + SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012` # [END bigquerystorage_jupyter_tutorial_query] """ result = ip.run_cell(_strip_region_tags(sample)) result.raise_error() # Throws an exception if the cell failed. - assert "nodejs_deps" in ip.user_ns # verify that variable exists - nodejs_deps = ip.user_ns["nodejs_deps"] + assert "tax_forms" in ip.user_ns # verify that variable exists + tax_forms = ip.user_ns["tax_forms"] # [START bigquerystorage_jupyter_tutorial_results] - nodejs_deps.head() + tax_forms.head() # [END bigquerystorage_jupyter_tutorial_results] # [START bigquerystorage_jupyter_tutorial_context] @@ -123,26 +105,11 @@ def test_jupyter_tutorial(ipython): sample = """ # [START bigquerystorage_jupyter_tutorial_query_default] - %%bigquery java_deps - SELECT - dependency_name, - dependency_platform, - project_name, - project_id, - version_number, - version_id, - dependency_kind, - optional_dependency, - dependency_requirements, - dependency_project_id - FROM - `bigquery-public-data.libraries_io.dependencies` - WHERE - LOWER(dependency_platform) = 'maven' - LIMIT 2500000 + %%bigquery tax_forms + SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012` # [END bigquerystorage_jupyter_tutorial_query_default] """ result = ip.run_cell(_strip_region_tags(sample)) result.raise_error() # Throws an exception if the cell failed. 
- assert "java_deps" in ip.user_ns # verify that variable exists + assert "tax_forms" in ip.user_ns # verify that variable exists diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5dad9dad47d..d5a1d3b5bb7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.6.2 -google-cloud-bigquery-storage==0.6.0 -google-cloud-bigquery==1.17.0 -pyarrow==0.13.0 +google-cloud-bigquery-storage==0.7.0 +google-cloud-bigquery==1.20.0 +pyarrow==0.14.1 ipython==7.2.0 -pandas==0.24.2 \ No newline at end of file +pandas==0.25.1 \ No newline at end of file From 429a38b37eab50d40cce52a30f3b518127fea9ae Mon Sep 17 00:00:00 2001 From: Gus Class Date: Wed, 23 Oct 2019 16:27:00 -0700 Subject: [PATCH 011/338] Adds updates including compute [(#2436)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2436) * Adds updates including compute * Python 2 compat pytest * Fixing weird \r\n issue from GH merge * Put asset tests back in * Re-add pod operator test * Hack parameter for k8s pod operator --- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d5a1d3b5bb7..5b42cef17ef 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ -google-auth==1.6.2 +google-auth==1.6.3 google-cloud-bigquery-storage==0.7.0 google-cloud-bigquery==1.20.0 pyarrow==0.14.1 -ipython==7.2.0 -pandas==0.25.1 \ No newline at end of file +ipython==7.8.0 +pandas==0.25.1 From 8212f8c3bcdf0e8311e9c268f6495af2f113ad29 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Fri, 20 Dec 2019 17:41:38 -0800 Subject: [PATCH 012/338] Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. 
* revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh --- bigquery_storage/to_dataframe/requirements.txt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5b42cef17ef..d513f1b7d62 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ -google-auth==1.6.3 +google-auth==1.10.0 google-cloud-bigquery-storage==0.7.0 -google-cloud-bigquery==1.20.0 -pyarrow==0.14.1 -ipython==7.8.0 -pandas==0.25.1 +google-cloud-bigquery==1.23.1 +pyarrow==0.15.1 +ipython==7.10.2 +pandas==0.25.3 From 3f2e5fd8a833c198650ea2a8c3a5b7492813a8e5 Mon Sep 17 00:00:00 2001 From: "Leah E. Cole" <6719667+leahecole@users.noreply.github.com> Date: Thu, 5 Mar 2020 14:22:12 -0800 Subject: [PATCH 013/338] chore(deps): update dependency google-auth to v1.11.2 [(#2724)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2724) Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d513f1b7d62..16905131d59 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.10.0 +google-auth==1.11.2 google-cloud-bigquery-storage==0.7.0 google-cloud-bigquery==1.23.1 pyarrow==0.15.1 From 3892b7b6ee29af7a8a4bc93db2d0d42ac5a3b61c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 6 Mar 2020 21:48:04 +0100 Subject: [PATCH 014/338] chore(deps): update dependency google-cloud-bigquery-storage to v0.8.0 [(#3050)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3050) * chore(deps): update dependency google-cloud-bigquery-storage to v0.8.0 * chore(deps): update pandas-gbq * chore(deps): update ipython * chore: update requirements.txt * chore: it is spelled version. 
* chore(deps): split pandas version * chore(deps): split pandas version Co-authored-by: Christopher Wilcox Co-authored-by: Leah Cole --- bigquery_storage/to_dataframe/requirements.txt | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 16905131d59..bb2be214f76 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,8 @@ google-auth==1.11.2 -google-cloud-bigquery-storage==0.7.0 +google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.23.1 pyarrow==0.15.1 -ipython==7.10.2 -pandas==0.25.3 +ipython==7.10.2; python_version > '3.0' +ipython==5.9.0; python_version < '3.0' +pandas==0.25.3; python_version > '3.0' +pandas==0.24.2; python_version < '3.0' From b74649d156c30cd8ffdd10b27cb1c1a8cace409a Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Wed, 1 Apr 2020 19:11:50 -0700 Subject: [PATCH 015/338] Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. Co-authored-by: Renovate Bot --- bigquery_storage/to_dataframe/requirements-test.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 bigquery_storage/to_dataframe/requirements-test.txt diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt new file mode 100644 index 00000000000..781d4326c94 --- /dev/null +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -0,0 +1 @@ +pytest==5.3.2 From a1e31e0ed409d65bbf862ed3a7bcc530689f8817 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 17 Apr 2020 03:09:45 +0200 Subject: [PATCH 016/338] Update dependency google-auth to v1.14.0 [(#3148)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3148) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index bb2be214f76..070e5d81088 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.11.2 +google-auth==1.14.0 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.23.1 pyarrow==0.15.1 From 0f64dd977c17347d86c5f95061eb985c8ab84744 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 18 Apr 2020 07:41:12 +0200 Subject: [PATCH 017/338] chore(deps): update dependency google-cloud-bigquery to v1.24.0 [(#3049)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3049) * chore(deps): update dependency google-cloud-bigquery to v1.24.0 * chore(deps): update ipython version * fix: fix requirements order * explicitly add grpc to resolve errors * adjust arguments * undo mistake * bump auth version Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Leah Cole Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Co-authored-by: Christopher Wilcox --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 070e5d81088..37431c62c30 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.14.0 google-cloud-bigquery-storage==0.8.0 -google-cloud-bigquery==1.23.1 +google-cloud-bigquery==1.24.0 pyarrow==0.15.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From a715d26b6272ee28a1c7f460e2a523677ee276fd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 22 Apr 2020 05:40:40 +0200 Subject: [PATCH 018/338] Update dependency pyarrow to v0.17.0 [(#3188)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3188) Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 37431c62c30..b10d94899b1 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.14.0 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 -pyarrow==0.15.1 +pyarrow==0.17.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 5bd013a2678dbc23f8ebbc4829433d71d78a1cc7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 28 Apr 2020 06:20:12 +0200 Subject: [PATCH 019/338] chore(deps): update dependency google-auth to v1.14.1 [(#3464)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3464) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | patch | `==1.14.0` -> `==1.14.1` | | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | minor | `==1.11.2` -> `==1.14.1` | --- ### Release Notes
googleapis/google-auth-library-python ### [`v1.14.1`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1141-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1140v1141-2020-04-21) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.14.0...v1.14.1)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index b10d94899b1..02b6e2178fb 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.14.0 +google-auth==1.14.1 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.0 From 4a83644904d54f69265804087bf854c83bc7d3f6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 May 2020 22:24:11 +0200 Subject: [PATCH 020/338] chore(deps): update dependency google-auth to v1.14.2 [(#3724)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3724) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | patch | `==1.14.1` -> `==1.14.2` | --- ### Release Notes
googleapis/google-auth-library-python ### [`v1.14.2`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1142-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1141v1142-2020-05-07) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.14.1...v1.14.2)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 02b6e2178fb..4ad6f3ee53e 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.14.1 +google-auth==1.14.2 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.0 From d1c6283cff9dd95f23a697fc53d34c0233153c11 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 May 2020 08:16:04 +0200 Subject: [PATCH 021/338] chore(deps): update dependency google-auth to v1.14.3 [(#3728)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3728) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | patch | `==1.14.2` -> `==1.14.3` | --- ### Release Notes
googleapis/google-auth-library-python ### [`v1.14.3`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1143-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1142v1143-2020-05-11) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.14.2...v1.14.3)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [x] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 4ad6f3ee53e..5ed5dc2ff28 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.14.2 +google-auth==1.14.3 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.0 From e0034644f78b1e5b022e28cf500159dff49ba773 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 18 May 2020 23:47:52 +0200 Subject: [PATCH 022/338] chore(deps): update dependency pyarrow to v0.17.1 [(#3806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3806) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5ed5dc2ff28..7a90a31a19b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.14.3 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 -pyarrow==0.17.0 +pyarrow==0.17.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 1fc37d6ec77167f8b59b002bcb9d60f1c8d8a8ed Mon Sep 17 00:00:00 2001 From: "Leah E. 
Cole" <6719667+leahecole@users.noreply.github.com> Date: Mon, 18 May 2020 20:33:45 -0700 Subject: [PATCH 023/338] update google-auth to 1.15.0 part 2 [(#3815)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3815) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 7a90a31a19b..151995f8e85 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.14.3 +google-auth==1.15.0 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.1 From bb9347e5b0a32db25d645b74e8d426b6518084a1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 29 May 2020 00:27:36 +0200 Subject: [PATCH 024/338] chore(deps): update dependency google-auth to v1.16.0 [(#3903)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3903) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 151995f8e85..bb7f7eb8c3c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.15.0 +google-auth==1.16.0 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.1 From 918b1e18c383747e0f727b672ab57801a2530923 Mon Sep 17 00:00:00 2001 From: "Leah E. Cole" <6719667+leahecole@users.noreply.github.com> Date: Thu, 4 Jun 2020 14:34:05 -0700 Subject: [PATCH 025/338] update google-auth [(#3962)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3962) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index bb7f7eb8c3c..06d42322f1d 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.16.0 +google-auth==1.16.1 google-cloud-bigquery-storage==0.8.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.1 From 6b0af9177116b9a93596ac7bff494da363d37637 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Jun 2020 01:34:33 +0200 Subject: [PATCH 026/338] Update dependency google-cloud-bigquery-storage to v1 [(#3968)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3968) Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 06d42322f1d..2f8614ad0ca 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.16.1 -google-cloud-bigquery-storage==0.8.0 +google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.24.0 pyarrow==0.17.1 ipython==7.10.2; python_version > '3.0' From 657d8a4546b9fb054348a1b8d3061f6f085df6e3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Jun 2020 06:36:11 +0200 Subject: [PATCH 027/338] Update dependency google-cloud-bigquery to v1.25.0 [(#4024)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4024) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | minor | `==1.24.0` -> `==1.25.0` | --- ### Release Notes
googleapis/python-bigquery ### [`v1.25.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​1250-httpswwwgithubcomgoogleapispython-bigquerycomparev1240v1250-2020-06-06) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v1.24.0...v1.25.0) ##### Features - add BigQuery storage client support to DB API ([#​36](https://www.github.com/googleapis/python-bigquery/issues/36)) ([ba9b2f8](https://www.github.com/googleapis/python-bigquery/commit/ba9b2f87e36320d80f6f6460b77e6daddb0fa214)) - **bigquery:** add create job method ([#​32](https://www.github.com/googleapis/python-bigquery/issues/32)) ([2abdef8](https://www.github.com/googleapis/python-bigquery/commit/2abdef82bed31601d1ca1aa92a10fea1e09f5297)) - **bigquery:** add support of model for extract job ([#​71](https://www.github.com/googleapis/python-bigquery/issues/71)) ([4a7a514](https://www.github.com/googleapis/python-bigquery/commit/4a7a514659a9f6f9bbd8af46bab3f8782d6b4b98)) - add HOUR support for time partitioning interval ([#​91](https://www.github.com/googleapis/python-bigquery/issues/91)) ([0dd90b9](https://www.github.com/googleapis/python-bigquery/commit/0dd90b90e3714c1d18f8a404917a9454870e338a)) - add support for policy tags ([#​77](https://www.github.com/googleapis/python-bigquery/issues/77)) ([38a5c01](https://www.github.com/googleapis/python-bigquery/commit/38a5c01ca830daf165592357c45f2fb4016aad23)) - make AccessEntry objects hashable ([#​93](https://www.github.com/googleapis/python-bigquery/issues/93)) ([23a173b](https://www.github.com/googleapis/python-bigquery/commit/23a173bc5a25c0c8200adc5af62eb05624c9099e)) - **bigquery:** expose start index parameter for query result ([#​121](https://www.github.com/googleapis/python-bigquery/issues/121)) ([be86de3](https://www.github.com/googleapis/python-bigquery/commit/be86de330a3c3801653a0ccef90e3d9bdb3eee7a)) - **bigquery:** unit and system test for dataframe with int column with Nan values ([#​39](https://www.github.com/googleapis/python-bigquery/issues/39)) ([5fd840e](https://www.github.com/googleapis/python-bigquery/commit/5fd840e9d4c592c4f736f2fd4792c9670ba6795e)) ##### Bug Fixes - allow partial streaming_buffer statistics ([#​37](https://www.github.com/googleapis/python-bigquery/issues/37)) ([645f0fd](https://www.github.com/googleapis/python-bigquery/commit/645f0fdb35ee0e81ee70f7459e796a42a1f03210)) - distinguish server timeouts from transport timeouts ([#​43](https://www.github.com/googleapis/python-bigquery/issues/43)) ([a17be5f](https://www.github.com/googleapis/python-bigquery/commit/a17be5f01043f32d9fbfb2ddf456031ea9205c8f)) - improve cell magic error message on missing query ([#​58](https://www.github.com/googleapis/python-bigquery/issues/58)) ([6182cf4](https://www.github.com/googleapis/python-bigquery/commit/6182cf48aef8f463bb96891cfc44a96768121dbc)) - **bigquery:** fix repr of model reference ([#​66](https://www.github.com/googleapis/python-bigquery/issues/66)) ([26c6204](https://www.github.com/googleapis/python-bigquery/commit/26c62046f4ec8880cf6561cc90a8b821dcc84ec5)) - **bigquery:** fix start index with page size for list rows ([#​27](https://www.github.com/googleapis/python-bigquery/issues/27)) ([400673b](https://www.github.com/googleapis/python-bigquery/commit/400673b5d0f2a6a3d828fdaad9d222ca967ffeff))
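As a concrete illustration of the DB API feature flagged in these notes (issue #36), the sketch below shows a cursor backed by the BigQuery Storage API. This is a minimal sketch, not part of the patch series, and it assumes the versions pinned at this point in the log (google-cloud-bigquery==1.25.0, google-cloud-bigquery-storage==1.0.0) plus default application credentials; the query itself is illustrative.

```python
from google.cloud import bigquery
from google.cloud import bigquery_storage_v1beta1
from google.cloud.bigquery import dbapi

# A plain REST-based client plus a Storage API client. Passing the storage
# client to connect() lets cursors download large result sets over the
# faster read API rather than the tabledata.list endpoint.
bqclient = bigquery.Client()
bqstorageclient = bigquery_storage_v1beta1.BigQueryStorageClient()

connection = dbapi.connect(client=bqclient, bqstorage_client=bqstorageclient)
cursor = connection.cursor()
cursor.execute(
    "SELECT name, state FROM `bigquery-public-data.usa_names.usa_1910_current` LIMIT 10"
)
for row in cursor.fetchall():
    print(row)
```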
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2f8614ad0ca..d2273428325 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.16.1 google-cloud-bigquery-storage==1.0.0 -google-cloud-bigquery==1.24.0 +google-cloud-bigquery==1.25.0 pyarrow==0.17.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From ee3167d41499b94d4e2048da04305cfb01340f2d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Jun 2020 21:51:16 +0200 Subject: [PATCH 028/338] Update dependency google-auth to v1.17.0 [(#4058)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4058) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d2273428325..bf8b32519b0 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.16.1 +google-auth==1.17.0 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From a879e61466c6d749b1e15a51c89a63d1d8ed6ae6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Jun 2020 02:32:11 +0200 Subject: [PATCH 029/338] chore(deps): update dependency google-auth to v1.17.1 [(#4073)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4073) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index bf8b32519b0..d7ff3068fe7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.17.0 +google-auth==1.17.1 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From e5892b5f2850f01f3a7cbaa6d78a4cf53a825fa1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Jun 2020 22:53:46 +0200 Subject: [PATCH 030/338] Update dependency google-auth to v1.17.2 [(#4083)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4083) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d7ff3068fe7..a374f62ae2b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.17.1 +google-auth==1.17.2 google-cloud-bigquery-storage==1.0.0 
google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From fc83abf5e27bd6826d49815bfea0b5989d267a30 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Jun 2020 05:34:55 +0200 Subject: [PATCH 031/338] Update dependency google-auth to v1.18.0 [(#4125)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4125) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index a374f62ae2b..1edd61d9e6a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.17.2 +google-auth==1.18.0 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From f68598ebdf1b8bddec9d07892f9901eb8984cad2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 00:46:30 +0200 Subject: [PATCH 032/338] chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole --- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 781d4326c94..79738af5f26 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==5.3.2 +pytest==5.4.3 From f2bcabf6d311c56b5150eabbeb46f6604a1f0a70 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 22:20:34 +0200 Subject: [PATCH 033/338] chore(deps): update dependency google-auth to v1.19.0 [(#4293)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4293) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 1edd61d9e6a..59f48f9d86c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.18.0 +google-auth==1.19.0 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From 258924e061aa301015c8c551986514db111453ee Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 17 Jul 2020 19:02:17 +0200 Subject: [PATCH 034/338] chore(deps): update dependency google-auth to v1.19.1 [(#4304)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4304) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 59f48f9d86c..d564fbdded7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.19.0 +google-auth==1.19.1 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From 9346f22de0445eff6a18045152a7b1ceaca41c64 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 18 Jul 2020 02:48:10 +0200 Subject: [PATCH 035/338] chore(deps): update dependency google-auth to v1.19.2 [(#4321)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4321) This 
PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | patch | `==1.19.1` -> `==1.19.2` | --- ### Release Notes
googleapis/google-auth-library-python ### [`v1.19.2`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1192-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1191v1192-2020-07-17) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.19.1...v1.19.2)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d564fbdded7..0ea81c43818 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.19.1 +google-auth==1.19.2 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==0.17.1 From eeba38f37b1d2e915b77e85976ac916fa0d7a548 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 24 Jul 2020 19:24:14 +0200 Subject: [PATCH 036/338] chore(deps): update dependency pyarrow to v1 [(#4370)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4370) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 0ea81c43818..a8c899e5e6a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.19.2 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 -pyarrow==0.17.1 +pyarrow==1.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 99a27355c6d23016beb8e9a097ec39feb0e7c737 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 28 Jul 2020 22:36:14 +0200 Subject: [PATCH 037/338] Update dependency google-auth to v1.20.0 [(#4387)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4387) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index a8c899e5e6a..d855f0fd99b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.19.2 +google-auth==1.20.0 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==1.0.0 From 9b4f39f6c1b460824adaf0286ae52e03eb7bacd1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 1 Aug 2020 21:51:00 +0200 Subject: [PATCH 038/338] Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) --- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 79738af5f26..7e460c8c866 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==5.4.3 +pytest==6.0.1 From 16966dd20f047786267c034b3f62637a872d1269 Mon 
Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 7 Aug 2020 03:36:31 +0200 Subject: [PATCH 039/338] chore(deps): update dependency google-auth to v1.20.1 [(#4452)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4452) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d855f0fd99b..80990849ac3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.20.0 +google-auth==1.20.1 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==1.0.0 From baf4247febb61c30dc25c7ccdd2ef3c400f40902 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 27 Aug 2020 05:43:45 +0200 Subject: [PATCH 040/338] chore(deps): update dependency pyarrow to v1.0.1 [(#4566)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4566) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 80990849ac3..6440b10de8f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.20.1 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 -pyarrow==1.0.0 +pyarrow==1.0.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 1abbbcab5218c6e0faf62b7e822b980ace726b05 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 28 Aug 2020 01:17:31 +0200 Subject: [PATCH 041/338] Update dependency google-auth to v1.21.0 [(#4588)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4588) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6440b10de8f..b5ac6177272 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.20.1 +google-auth==1.21.0 google-cloud-bigquery-storage==1.0.0 google-cloud-bigquery==1.25.0 pyarrow==1.0.1 From e27b0b38f49ed7665566b8188ce2e31c5140119e Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 2 Sep 2020 16:03:50 +0200 Subject: [PATCH 042/338] chore: update templates --- bigquery_storage/AUTHORING_GUIDE.md | 1 + bigquery_storage/CONTRIBUTING.md | 1 + bigquery_storage/noxfile.py | 222 +++++++++++++++++++++ bigquery_storage/to_dataframe/main_test.py | 9 +- bigquery_storage/to_dataframe/noxfile.py | 222 +++++++++++++++++++++ 5 files changed, 448 insertions(+), 7 deletions(-) create mode 100644 bigquery_storage/AUTHORING_GUIDE.md create mode 100644 bigquery_storage/CONTRIBUTING.md create mode 100644 bigquery_storage/noxfile.py create mode 100644 bigquery_storage/to_dataframe/noxfile.py diff --git a/bigquery_storage/AUTHORING_GUIDE.md b/bigquery_storage/AUTHORING_GUIDE.md new file mode 100644 index 00000000000..55c97b32f4c --- /dev/null +++ b/bigquery_storage/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/bigquery_storage/CONTRIBUTING.md b/bigquery_storage/CONTRIBUTING.md new file mode 
100644 index 00000000000..34c882b6f1a --- /dev/null +++ b/bigquery_storage/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/bigquery_storage/noxfile.py b/bigquery_storage/noxfile.py new file mode 100644 index 00000000000..5660f08be44 --- /dev/null +++ b/bigquery_storage/noxfile.py @@ -0,0 +1,222 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7"], + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". 
+ + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py index 8335b437063..126333bfe32 100644 --- a/bigquery_storage/to_dataframe/main_test.py +++ b/bigquery_storage/to_dataframe/main_test.py @@ -31,10 +31,7 @@ def clients(): ) # Make clients. - bqclient = bigquery.Client( - credentials=credentials, - project=your_project_id, - ) + bqclient = bigquery.Client(credentials=credentials, project=your_project_id,) bqstorageclient = bigquery_storage_v1beta1.BigQueryStorageClient( credentials=credentials ) @@ -130,9 +127,7 @@ def test_session_to_dataframe(capsys, clients): # We use a LIQUID strategy in this example because we only read from a # single stream. Consider BALANCED if you're consuming multiple streams # concurrently and want more consistent stream sizes. - sharding_strategy=( - bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID - ), + sharding_strategy=(bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID), ) # This example reads from only a single stream. Read from multiple streams diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py new file mode 100644 index 00000000000..5660f08be44 --- /dev/null +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -0,0 +1,222 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7"], + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. 
You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. 
This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) From 076181ee22f118affc29e659d4fb2d1a0c82bab6 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 3 Sep 2020 15:09:12 +0200 Subject: [PATCH 043/338] Move quickstart sample into its own subdirectory --- bigquery_storage/quickstart/__init__.py | 15 +++++++++++++++ bigquery_storage/{ => quickstart}/noxfile.py | 0 bigquery_storage/{ => quickstart}/quickstart.py | 0 .../{tests => quickstart}/quickstart_test.py | 2 +- bigquery_storage/quickstart/requirements-test.txt | 1 + bigquery_storage/quickstart/requirements.txt | 2 ++ bigquery_storage/requirements.txt | 1 - bigquery_storage/tests/__init__.py | 0 8 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 bigquery_storage/quickstart/__init__.py rename bigquery_storage/{ => quickstart}/noxfile.py (100%) rename bigquery_storage/{ => quickstart}/quickstart.py (100%) rename bigquery_storage/{tests => quickstart}/quickstart_test.py (97%) create mode 100644 bigquery_storage/quickstart/requirements-test.txt create mode 100644 bigquery_storage/quickstart/requirements.txt delete mode 100644 bigquery_storage/requirements.txt delete mode 100644 bigquery_storage/tests/__init__.py diff --git a/bigquery_storage/quickstart/__init__.py b/bigquery_storage/quickstart/__init__.py new file mode 100644 index 00000000000..a2a70562f48 --- /dev/null +++ b/bigquery_storage/quickstart/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
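A note on the template noxfile.py introduced above: it is never edited per sample. Instead, it looks for an optional noxfile_config.py next to each sample and merges that file's TEST_CONFIG_OVERRIDE dict over its defaults. A hypothetical override file, using only the three keys the template documents, might look like this (the values are illustrative, not taken from the repository):

```python
# noxfile_config.py -- hypothetical per-sample override for the template
# noxfile.py above. Only the documented keys are recognized.
TEST_CONFIG_OVERRIDE = {
    # Opt this sample out of specific Python versions.
    "ignored_versions": ["2.7"],
    # Resolve the test project id from a build-specific variable instead
    # of the shared GOOGLE_CLOUD_PROJECT.
    "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
    # Extra (non-secret) environment variables to inject into pytest.
    "envs": {},
}
```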
diff --git a/bigquery_storage/noxfile.py b/bigquery_storage/quickstart/noxfile.py similarity index 100% rename from bigquery_storage/noxfile.py rename to bigquery_storage/quickstart/noxfile.py diff --git a/bigquery_storage/quickstart.py b/bigquery_storage/quickstart/quickstart.py similarity index 100% rename from bigquery_storage/quickstart.py rename to bigquery_storage/quickstart/quickstart.py diff --git a/bigquery_storage/tests/quickstart_test.py b/bigquery_storage/quickstart/quickstart_test.py similarity index 97% rename from bigquery_storage/tests/quickstart_test.py rename to bigquery_storage/quickstart/quickstart_test.py index fde039f4620..37b1b2dda10 100644 --- a/bigquery_storage/tests/quickstart_test.py +++ b/bigquery_storage/quickstart/quickstart_test.py @@ -17,7 +17,7 @@ import pytest -from .. import quickstart +from . import quickstart def now_millis(): diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt new file mode 100644 index 00000000000..7e460c8c866 --- /dev/null +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -0,0 +1 @@ +pytest==6.0.1 diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt new file mode 100644 index 00000000000..31c61a34541 --- /dev/null +++ b/bigquery_storage/quickstart/requirements.txt @@ -0,0 +1,2 @@ +fastavro +google-cloud-bigquery-storage==1.0.0 diff --git a/bigquery_storage/requirements.txt b/bigquery_storage/requirements.txt deleted file mode 100644 index acd0800e713..00000000000 --- a/bigquery_storage/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -fastavro \ No newline at end of file diff --git a/bigquery_storage/tests/__init__.py b/bigquery_storage/tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 From 030276c9a7157226b26534699d7a58b29b1512df Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 24 Sep 2020 17:18:39 +0200 Subject: [PATCH 044/338] chore: transition the library to microgenerator (#62) * chore: remove old GAPIC code for v1 API * Regenerate the v1 API with microgenerator * Adjust dependencies and classifiers in setup.py * Fix types aggregation in types.py * Adjust import paths * Fix and adjust unit tests * Fix and adjust system tests * Adjust unit test coverage threshold Not all paths are covered, not even in the generated code, thus the adjustment is necessary. * Fix docs build * Adjust quickstart sample * Adjust sample in client docstring * Remove beta API code and docs * Simplify synth replacement rules and regenerate Rules conditionally matching versions other than v1 are not needed anymore. 
* Consolidate imports in google.cloud.bigquery.storage * Use google.cloud.bigquery.storage as import path * Hide async client from most import paths * Use GAPIC client mock in ReadRowsStream tests * Remove redundant installations in nox sessions * Include manual classes in reference docs * Add UPGRADING guide * Add minor CHANGELOG improvements --- bigquery_storage/quickstart/quickstart.py | 24 +++++------ bigquery_storage/to_dataframe/main_test.py | 48 ++++++++++------------ 2 files changed, 33 insertions(+), 39 deletions(-) diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py index 8358fdc1336..ef42f02ab14 100644 --- a/bigquery_storage/quickstart/quickstart.py +++ b/bigquery_storage/quickstart/quickstart.py @@ -17,7 +17,8 @@ def main(project_id="your-project-id", snapshot_millis=0): # [START bigquerystorage_quickstart] - from google.cloud import bigquery_storage_v1 + from google.cloud.bigquery.storage import BigQueryReadClient + from google.cloud.bigquery.storage import types # TODO(developer): Set the project_id variable. # project_id = 'your-project-id' @@ -25,38 +26,35 @@ def main(project_id="your-project-id", snapshot_millis=0): # The read session is created in this project. This project can be # different from that which contains the table. - client = bigquery_storage_v1.BigQueryReadClient() + client = BigQueryReadClient() # This example reads baby name data from the public datasets. table = "projects/{}/datasets/{}/tables/{}".format( "bigquery-public-data", "usa_names", "usa_1910_current" ) - requested_session = bigquery_storage_v1.types.ReadSession() + requested_session = types.ReadSession() requested_session.table = table # This API can also deliver data serialized in Apache Arrow format. # This example leverages Apache Avro. - requested_session.data_format = bigquery_storage_v1.enums.DataFormat.AVRO + requested_session.data_format = types.DataFormat.AVRO # We limit the output columns to a subset of those allowed in the table, # and set a simple filter to only report names from the state of # Washington (WA). - requested_session.read_options.selected_fields.append("name") - requested_session.read_options.selected_fields.append("number") - requested_session.read_options.selected_fields.append("state") + requested_session.read_options.selected_fields = ["name", "number", "state"] requested_session.read_options.row_restriction = 'state = "WA"' # Set a snapshot time if it's been specified. - modifiers = None if snapshot_millis > 0: - requested_session.table_modifiers.snapshot_time.FromMilliseconds( - snapshot_millis - ) + snapshot_time = types.Timestamp() + snapshot_time.FromMilliseconds(snapshot_millis) + requested_session.table_modifiers.snapshot_time = snapshot_time parent = "projects/{}".format(project_id) session = client.create_read_session( - parent, - requested_session, + parent=parent, + read_session=requested_session, # We'll use only a single stream for reading data from the table. However, # if you wanted to fan out multiple readers you could do so by having a # reader process each individual stream. 
diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py index 126333bfe32..ecce1685ebf 100644 --- a/bigquery_storage/to_dataframe/main_test.py +++ b/bigquery_storage/to_dataframe/main_test.py @@ -21,7 +21,7 @@ def clients(): # [START bigquerystorage_pandas_tutorial_create_client] import google.auth from google.cloud import bigquery - from google.cloud import bigquery_storage_v1beta1 + from google.cloud.bigquery import storage # Explicitly create a credentials object. This allows you to use the same # credentials for both the BigQuery and BigQuery Storage clients, avoiding @@ -32,9 +32,7 @@ def clients(): # Make clients. bqclient = bigquery.Client(credentials=credentials, project=your_project_id,) - bqstorageclient = bigquery_storage_v1beta1.BigQueryStorageClient( - credentials=credentials - ) + bqstorageclient = storage.BigQueryReadClient(credentials=credentials) # [END bigquerystorage_pandas_tutorial_create_client] # [END bigquerystorage_pandas_tutorial_all] return bqclient, bqstorageclient @@ -98,48 +96,46 @@ def test_query_to_dataframe(capsys, clients): def test_session_to_dataframe(capsys, clients): - from google.cloud import bigquery_storage_v1beta1 + from google.cloud.bigquery.storage import types bqclient, bqstorageclient = clients your_project_id = bqclient.project # [START bigquerystorage_pandas_tutorial_all] # [START bigquerystorage_pandas_tutorial_read_session] - table = bigquery_storage_v1beta1.types.TableReference() - table.project_id = "bigquery-public-data" - table.dataset_id = "new_york_trees" - table.table_id = "tree_species" + project_id = "bigquery-public-data" + dataset_id = "new_york_trees" + table_id = "tree_species" + table = f"projects/{project_id}/datasets/{dataset_id}/tables/{table_id}" # Select columns to read with read options. If no read options are # specified, the whole table is read. - read_options = bigquery_storage_v1beta1.types.TableReadOptions() - read_options.selected_fields.append("species_common_name") - read_options.selected_fields.append("fall_color") + read_options = types.ReadSession.TableReadOptions( + selected_fields=["species_common_name", "fall_color"] + ) parent = "projects/{}".format(your_project_id) - session = bqstorageclient.create_read_session( - table, - parent, - read_options=read_options, + + requested_session = types.ReadSession( + table=table, # This API can also deliver data serialized in Apache Avro format. # This example leverages Apache Arrow. - format_=bigquery_storage_v1beta1.enums.DataFormat.ARROW, - # We use a LIQUID strategy in this example because we only read from a - # single stream. Consider BALANCED if you're consuming multiple streams - # concurrently and want more consistent stream sizes. - sharding_strategy=(bigquery_storage_v1beta1.enums.ShardingStrategy.LIQUID), + data_format=types.DataFormat.ARROW, + read_options=read_options, + ) + read_session = bqstorageclient.create_read_session( + parent=parent, read_session=requested_session ) # This example reads from only a single stream. Read from multiple streams # to fetch data faster. Note that the session may not contain any streams # if there are no rows to read. - stream = session.streams[0] - position = bigquery_storage_v1beta1.types.StreamPosition(stream=stream) - reader = bqstorageclient.read_rows(position) + stream = read_session.streams[0] + reader = bqstorageclient.read_rows(stream.name) - # Parse all Avro blocks and create a dataframe. This call requires a + # Parse all Arrow blocks and create a dataframe. 
This call requires a # session, because the session contains the schema for the row blocks. - dataframe = reader.to_dataframe(session) + dataframe = reader.to_dataframe(read_session) print(dataframe.head()) # [END bigquerystorage_pandas_tutorial_read_session] # [END bigquerystorage_pandas_tutorial_all] From 80e91b58b5db2542f702ec2558d0e8113783a5cc Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 29 Sep 2020 18:25:48 +0200 Subject: [PATCH 045/338] chore: Release v2.0.0 (#64) * chore: release v2.0.0 * Update CHANGELOG.md * Replace PROJECT_ID with GOOGLE_CLOUD_PROJECT in test * Do not add google.cloud.bigquery as namespace package * Install the library as non-editable in tests This avoids import errors from google.cloud.bigquery.* namespace. * Fix test coverage plugin paths * Regenerate code with different namespace (bigquery_storage) * Adjust import paths to bigquery_storage namespace * Adjust docs to bigquery_storage namespace * Adjust UPGRADING guide to changed namespace Co-authored-by: Tim Swast --- bigquery_storage/quickstart/quickstart.py | 4 ++-- bigquery_storage/quickstart/quickstart_test.py | 2 +- bigquery_storage/to_dataframe/main_test.py | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py index ef42f02ab14..4372c22dd60 100644 --- a/bigquery_storage/quickstart/quickstart.py +++ b/bigquery_storage/quickstart/quickstart.py @@ -17,8 +17,8 @@ def main(project_id="your-project-id", snapshot_millis=0): # [START bigquerystorage_quickstart] - from google.cloud.bigquery.storage import BigQueryReadClient - from google.cloud.bigquery.storage import types + from google.cloud.bigquery_storage import BigQueryReadClient + from google.cloud.bigquery_storage import types # TODO(developer): Set the project_id variable. # project_id = 'your-project-id' diff --git a/bigquery_storage/quickstart/quickstart_test.py b/bigquery_storage/quickstart/quickstart_test.py index 37b1b2dda10..33494cca6e6 100644 --- a/bigquery_storage/quickstart/quickstart_test.py +++ b/bigquery_storage/quickstart/quickstart_test.py @@ -29,7 +29,7 @@ def now_millis(): @pytest.fixture() def project_id(): - return os.environ["PROJECT_ID"] + return os.environ["GOOGLE_CLOUD_PROJECT"] def test_quickstart_wo_snapshot(capsys, project_id): diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py index ecce1685ebf..4682057800d 100644 --- a/bigquery_storage/to_dataframe/main_test.py +++ b/bigquery_storage/to_dataframe/main_test.py @@ -21,7 +21,7 @@ def clients(): # [START bigquerystorage_pandas_tutorial_create_client] import google.auth from google.cloud import bigquery - from google.cloud.bigquery import storage + from google.cloud import bigquery_storage # Explicitly create a credentials object. This allows you to use the same # credentials for both the BigQuery and BigQuery Storage clients, avoiding @@ -32,7 +32,7 @@ def clients(): # Make clients. 
bqclient = bigquery.Client(credentials=credentials, project=your_project_id,) - bqstorageclient = storage.BigQueryReadClient(credentials=credentials) + bqstorageclient = bigquery_storage.BigQueryReadClient(credentials=credentials) # [END bigquerystorage_pandas_tutorial_create_client] # [END bigquerystorage_pandas_tutorial_all] return bqclient, bqstorageclient @@ -96,7 +96,7 @@ def test_query_to_dataframe(capsys, clients): def test_session_to_dataframe(capsys, clients): - from google.cloud.bigquery.storage import types + from google.cloud.bigquery_storage import types bqclient, bqstorageclient = clients your_project_id = bqclient.project From 64b3a4f862173119d8c75ad0b9181cb3294ae667 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 16 Oct 2020 09:29:21 -0700 Subject: [PATCH 046/338] doc: update resource docstrings (#60) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore(python): use BUILD_SPECIFIC_GCLOUD_PROJECT for samples https://github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32 `BUILD_SPECIFIC_GCLOUD_PROJECT` is an alternate project used for sample tests that do poorly with concurrent runs on the same project. Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Sep 30 13:06:03 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: 9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4 Source-Link: https://github.com/googleapis/synthtool/commit/9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4 * chore(python): use 'setup.py' to detect repo root Closes #792 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Oct 9 15:06:33 2020 -0600 Source-Repo: googleapis/synthtool Source-Sha: e0ae456852bf22f38796deb79cff30b516fde244 Source-Link: https://github.com/googleapis/synthtool/commit/e0ae456852bf22f38796deb79cff30b516fde244 * build(python): samples tests should pass if no samples exist Source-Author: Daniel Sanche Source-Date: Wed Oct 14 08:00:06 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 477764cc4ee6db346d3febef2bb1ea0abf27de52 Source-Link: https://github.com/googleapis/synthtool/commit/477764cc4ee6db346d3febef2bb1ea0abf27de52 --- bigquery_storage/quickstart/noxfile.py | 5 +++++ bigquery_storage/to_dataframe/noxfile.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 5660f08be44..f3a90583ad5 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -199,6 +199,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 5660f08be44..f3a90583ad5 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -199,6 +199,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # 
https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") From 95a117b9f0393ebbe60182932bc88554dcbfd9c5 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 16 Oct 2020 13:45:11 -0500 Subject: [PATCH 047/338] docs: update to_dataframe sample to latest dependencies (#72) Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index b5ac6177272..58712e8f74e 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.21.0 -google-cloud-bigquery-storage==1.0.0 -google-cloud-bigquery==1.25.0 +google-cloud-bigquery-storage==2.0.0 +google-cloud-bigquery==2.1.0 pyarrow==1.0.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 4fb1f49bf11117a7a1ca42ceac60ee3cb711973d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Oct 2020 19:32:05 +0100 Subject: [PATCH 048/338] chore(deps): update dependency google-cloud-bigquery to v2.2.0 (#85) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | minor | `==2.1.0` -> `==2.2.0` | --- ### Release Notes
googleapis/python-bigquery ### [`v2.2.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​220-httpswwwgithubcomgoogleapispython-bigquerycomparev210v220-2020-10-19) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.1.0...v2.2.0) ##### Features - add method api_repr for table list item ([#​299](https://www.github.com/googleapis/python-bigquery/issues/299)) ([07c70f0](https://www.github.com/googleapis/python-bigquery/commit/07c70f0292f9212f0c968cd5c9206e8b0409c0da)) - add support for listing arima, automl, boosted tree, DNN, and matrix factorization models ([#​328](https://www.github.com/googleapis/python-bigquery/issues/328)) ([502a092](https://www.github.com/googleapis/python-bigquery/commit/502a0926018abf058cb84bd18043c25eba15a2cc)) - add timeout paramter to load_table_from_file and it dependent methods ([#​327](https://www.github.com/googleapis/python-bigquery/issues/327)) ([b0dd892](https://www.github.com/googleapis/python-bigquery/commit/b0dd892176e31ac25fddd15554b5bfa054299d4d)) - add to_api_repr method to Model ([#​326](https://www.github.com/googleapis/python-bigquery/issues/326)) ([fb401bd](https://www.github.com/googleapis/python-bigquery/commit/fb401bd94477323bba68cf252dd88166495daf54)) - allow client options to be set in magics context ([#​322](https://www.github.com/googleapis/python-bigquery/issues/322)) ([5178b55](https://www.github.com/googleapis/python-bigquery/commit/5178b55682f5e264bfc082cde26acb1fdc953a18)) ##### Bug Fixes - make TimePartitioning repr evaluable ([#​110](https://www.github.com/googleapis/python-bigquery/issues/110)) ([20f473b](https://www.github.com/googleapis/python-bigquery/commit/20f473bfff5ae98377f5d9cdf18bfe5554d86ff4)), closes [#​109](https://www.github.com/googleapis/python-bigquery/issues/109) - use version.py instead of pkg_resources.get_distribution ([#​307](https://www.github.com/googleapis/python-bigquery/issues/307)) ([b8f502b](https://www.github.com/googleapis/python-bigquery/commit/b8f502b14f21d1815697e4d57cf1225dfb4a7c5e)) ##### Performance Improvements - add size parameter for load table from dataframe and json methods ([#​280](https://www.github.com/googleapis/python-bigquery/issues/280)) ([3be78b7](https://www.github.com/googleapis/python-bigquery/commit/3be78b737add7111e24e912cd02fc6df75a07de6)) ##### Documentation - update clustering field docstrings ([#​286](https://www.github.com/googleapis/python-bigquery/issues/286)) ([5ea1ece](https://www.github.com/googleapis/python-bigquery/commit/5ea1ece2d911cdd1f3d9549ee01559ce8ed8269a)), closes [#​285](https://www.github.com/googleapis/python-bigquery/issues/285) - update snippets samples to support version 2.0 ([#​309](https://www.github.com/googleapis/python-bigquery/issues/309)) ([61634be](https://www.github.com/googleapis/python-bigquery/commit/61634be9bf9e3df7589fc1bfdbda87288859bb13)) ##### Dependencies - add protobuf dependency ([#​306](https://www.github.com/googleapis/python-bigquery/issues/306)) ([cebb5e0](https://www.github.com/googleapis/python-bigquery/commit/cebb5e0e911e8c9059bc8c9e7fce4440e518bff3)), closes [#​305](https://www.github.com/googleapis/python-bigquery/issues/305) - require pyarrow for pandas support ([#​314](https://www.github.com/googleapis/python-bigquery/issues/314)) ([801e4c0](https://www.github.com/googleapis/python-bigquery/commit/801e4c0574b7e421aa3a28cafec6fd6bcce940dd)), closes [#​265](https://www.github.com/googleapis/python-bigquery/issues/265)
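To see what these 2.x pins exercise together, here is a minimal sketch of the to_dataframe pattern from the sample, written against the surface shown in the microgenerator diffs above (google-cloud-bigquery 2.x alongside google-cloud-bigquery-storage 2.x); the query is illustrative:

```python
import google.auth
from google.cloud import bigquery
from google.cloud import bigquery_storage

# Create the credentials once and share them between both clients, as the
# sample does, so the project id is only resolved a single time.
credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
bqclient = bigquery.Client(credentials=credentials, project=project_id)
bqstorageclient = bigquery_storage.BigQueryReadClient(credentials=credentials)

query_string = """
SELECT name, SUM(number) AS total
FROM `bigquery-public-data.usa_names.usa_1910_current`
GROUP BY name
ORDER BY total DESC
LIMIT 10
"""

# result() waits for the query to finish; to_dataframe() downloads the rows
# over the BigQuery Storage API when a read client is supplied.
dataframe = (
    bqclient.query(query_string)
    .result()
    .to_dataframe(bqstorage_client=bqstorageclient)
)
print(dataframe.head())
```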
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 58712e8f74e..85a10d0befe 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.21.0 google-cloud-bigquery-storage==2.0.0 -google-cloud-bigquery==2.1.0 +google-cloud-bigquery==2.2.0 pyarrow==1.0.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From c9386d7147e488dee9fd59b5a15c03d73be01f79 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Oct 2020 19:44:02 +0100 Subject: [PATCH 049/338] chore(deps): update dependency google-auth to v1.22.1 (#84) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | minor | `==1.21.0` -> `==1.22.1` | --- ### Release Notes
**googleapis/google-auth-library-python**

### [`v1.22.1`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1221-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1220v1221-2020-10-05)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.22.0...v1.22.1)

### [`v1.22.0`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1220-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1213v1220-2020-09-28)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.21.3...v1.22.0)

##### Features

- add asyncio based auth flow ([#​612](https://www.github.com/googleapis/google-auth-library-python/issues/612)) ([7e15258](https://www.github.com/googleapis/google-auth-library-python/commit/7e1525822d51bd9ce7dffca42d71313e6e776fcd)), closes [#​572](https://www.github.com/googleapis/google-auth-library-python/issues/572)

##### [1.21.3](https://www.github.com/googleapis/google-auth-library-python/compare/v1.21.2...v1.21.3) (2020-09-22)

##### Bug Fixes

- fix expiry for `to_json()` ([#​589](https://www.github.com/googleapis/google-auth-library-python/issues/589)) ([d0e0aba](https://www.github.com/googleapis/google-auth-library-python/commit/d0e0aba0a9f665268ffa1b22d44f4bd7e9b449d6)), closes [/github.com/googleapis/oauth2client/blob/master/oauth2client/client.py#L55](https://www.github.com/googleapis//github.com/googleapis/oauth2client/blob/master/oauth2client/client.py/issues/L55)

##### [1.21.2](https://www.github.com/googleapis/google-auth-library-python/compare/v1.21.1...v1.21.2) (2020-09-08)

##### Bug Fixes

- migrate signBlob to iamcredentials.googleapis.com ([#​600](https://www.github.com/googleapis/google-auth-library-python/issues/600)) ([694d83f](https://www.github.com/googleapis/google-auth-library-python/commit/694d83fd23c0e8c2fde27136d1b3f8f6db6338a6))

##### [1.21.1](https://www.github.com/googleapis/google-auth-library-python/compare/v1.21.0...v1.21.1) (2020-09-03)

##### Bug Fixes

- dummy commit to trigger an auto release ([#​597](https://www.github.com/googleapis/google-auth-library-python/issues/597)) ([d32f7df](https://www.github.com/googleapis/google-auth-library-python/commit/d32f7df4895122ef23b664672d7db3f58d9b7d36))

### [`v1.21.3`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1213-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1212v1213-2020-09-22)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.21.2...v1.21.3)

### [`v1.21.2`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1212-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1211v1212-2020-09-08)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.21.1...v1.21.2)

### [`v1.21.1`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1211-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1210v1211-2020-09-03)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.21.0...v1.21.1)
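To make the `to_json()` expiry fix above concrete, here is a minimal round-trip sketch, assuming an authorized-user credentials file already exists; the file path is a placeholder:

```python
import json

from google.oauth2.credentials import Credentials

# Placeholder path to an existing authorized-user JSON file.
creds = Credentials.from_authorized_user_file("authorized_user.json")

# Serialize and restore; with the 1.21.3 fix, ``expiry`` (when populated,
# e.g. after a refresh) survives the round trip instead of being dropped.
payload = creds.to_json()
restored = Credentials.from_authorized_user_info(json.loads(payload))
print(restored.expiry == creds.expiry)
```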
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 85a10d0befe..2687648b41a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.21.0 +google-auth==1.22.1 google-cloud-bigquery-storage==2.0.0 google-cloud-bigquery==2.2.0 pyarrow==1.0.1 From 9910616de9f4c5d3af1655155ecaa5395be4cef6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 30 Oct 2020 18:02:02 +0100 Subject: [PATCH 050/338] chore(deps): update dependency google-auth to v1.23.0 (#92) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | minor | `==1.22.1` -> `==1.23.0` | --- ### Release Notes
googleapis/google-auth-library-python ### [`v1.23.0`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1230-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1221v1230-2020-10-29) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.22.1...v1.23.0) ##### Features - Add custom scopes for access tokens from the metadata service ([#​633](https://www.github.com/googleapis/google-auth-library-python/issues/633)) ([0323cf3](https://www.github.com/googleapis/google-auth-library-python/commit/0323cf390b16e8483660ac88775e8ea4e7f7702d)) ##### Bug Fixes - **deps:** Revert "fix: pin 'aoihttp < 3.7.0dev' ([#​634](https://www.github.com/googleapis/google-auth-library-python/issues/634))" ([#​632](https://www.github.com/googleapis/google-auth-library-python/issues/632)) ([#​640](https://www.github.com/googleapis/google-auth-library-python/issues/640)) ([b790e65](https://www.github.com/googleapis/google-auth-library-python/commit/b790e6535cc37591b23866027a426cde312e07c1)) - pin 'aoihttp < 3.7.0dev' ([#​634](https://www.github.com/googleapis/google-auth-library-python/issues/634)) ([05f9524](https://www.github.com/googleapis/google-auth-library-python/commit/05f95246fab928fe2f445781117eeac8088497fb)) - remove checks for ancient versions of Cryptography ([#​596](https://www.github.com/googleapis/google-auth-library-python/issues/596)) ([6407258](https://www.github.com/googleapis/google-auth-library-python/commit/6407258956ec42e3b722418cb7f366e5ae9272ec)), closes [/github.com/googleapis/google-auth-library-python/issues/595#issuecomment-683903062](https://www.github.com/googleapis//github.com/googleapis/google-auth-library-python/issues/595/issues/issuecomment-683903062) ##### [1.22.1](https://www.github.com/googleapis/google-auth-library-python/compare/v1.22.0...v1.22.1) (2020-10-05) ##### Bug Fixes - move aiohttp to extra as it is currently internal surface ([#​619](https://www.github.com/googleapis/google-auth-library-python/issues/619)) ([a924011](https://www.github.com/googleapis/google-auth-library-python/commit/a9240111e7af29338624d98ee10aed31462f4d19)), closes [#​618](https://www.github.com/googleapis/google-auth-library-python/issues/618)
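The custom-scopes feature above can be exercised with a short sketch like the following, assuming the code runs on a Compute Engine instance so the metadata service is reachable; the scope shown is only an example:

```python
import google.auth
import google.auth.transport.requests

# Request a non-default scope; on GCE this is forwarded to the metadata
# service when the access token is fetched.
credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/bigquery.readonly"]
)

request = google.auth.transport.requests.Request()
credentials.refresh(request)
print(credentials.token is not None)
```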
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2687648b41a..14c1784c27b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.22.1 +google-auth==1.23.0 google-cloud-bigquery-storage==2.0.0 google-cloud-bigquery==2.2.0 pyarrow==1.0.1 From 678845f898a3480695ced967d4bff55a2bf2879d Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 6 Nov 2020 09:44:27 -0800 Subject: [PATCH 051/338] chore: re-generated to pick up changes from synthtool (#95) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * fix(python_library): fix external unit test dependencies I recently submitted https://github.com/googleapis/synthtool/pull/811/files, allowing external dependencies for unit tests. This fixes a small missing comma bug Source-Author: Daniel Sanche Source-Date: Thu Oct 29 16:58:01 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 6542bd723403513626f61642fc02ddca528409aa Source-Link: https://github.com/googleapis/synthtool/commit/6542bd723403513626f61642fc02ddca528409aa * chore: add type hint check Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Wed Nov 4 17:36:32 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 3d3e94c4e02370f307a9a200b0c743c3d8d19f29 Source-Link: https://github.com/googleapis/synthtool/commit/3d3e94c4e02370f307a9a200b0c743c3d8d19f29 --- bigquery_storage/quickstart/noxfile.py | 8 +++++++- bigquery_storage/to_dataframe/noxfile.py | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index f3a90583ad5..9be2c922fd2 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -38,6 +38,9 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. "ignored_versions": ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. 
You can also use your own string @@ -130,7 +133,10 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - session.install("flake8", "flake8-import-order") + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index f3a90583ad5..9be2c922fd2 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -38,6 +38,9 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. "ignored_versions": ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string @@ -130,7 +133,10 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - session.install("flake8", "flake8-import-order") + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ From d0dfe3f6c2abdcdc659b63fe3943be7114fc18c3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 12 Nov 2020 18:46:21 +0100 Subject: [PATCH 052/338] chore(deps): update dependency pyarrow to v2 (#90) Co-authored-by: Tim Swast Co-authored-by: Takashi Matsuo --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 14c1784c27b..2c36d997247 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.23.0 google-cloud-bigquery-storage==2.0.0 google-cloud-bigquery==2.2.0 -pyarrow==1.0.1 +pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 49c8a0f8881bcd93593e0b0618813d923d74187d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 12 Nov 2020 19:13:08 +0100 Subject: [PATCH 053/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.0.1 (#88) * chore(deps): update dependency google-cloud-bigquery-storage to v2.0.1 * test: make system test more robust I was a bit worried that since WA appeared in the request as part of the filter, the test might pass even in certain error conditions. 
Co-authored-by: Tim Swast Co-authored-by: Takashi Matsuo --- bigquery_storage/quickstart/quickstart.py | 2 +- bigquery_storage/quickstart/quickstart_test.py | 4 ++-- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py index 4372c22dd60..7dda6bbfe48 100644 --- a/bigquery_storage/quickstart/quickstart.py +++ b/bigquery_storage/quickstart/quickstart.py @@ -79,7 +79,7 @@ def main(project_id="your-project-id", snapshot_millis=0): names.add(row["name"]) states.add(row["state"]) - print("Got {} unique names in states: {}".format(len(names), states)) + print("Got {} unique names in states: {}".format(len(names), ", ".join(states))) # [END bigquerystorage_quickstart] diff --git a/bigquery_storage/quickstart/quickstart_test.py b/bigquery_storage/quickstart/quickstart_test.py index 33494cca6e6..23f3c350413 100644 --- a/bigquery_storage/quickstart/quickstart_test.py +++ b/bigquery_storage/quickstart/quickstart_test.py @@ -35,10 +35,10 @@ def project_id(): def test_quickstart_wo_snapshot(capsys, project_id): quickstart.main(project_id) out, _ = capsys.readouterr() - assert "WA" in out + assert "unique names in states: WA" in out def test_quickstart_with_snapshot(capsys, project_id): quickstart.main(project_id, now_millis() - 5000) out, _ = capsys.readouterr() - assert "WA" in out + assert "unique names in states: WA" in out diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 31c61a34541..83912611c23 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==1.0.0 +google-cloud-bigquery-storage==2.0.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2c36d997247..076b92032b9 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.23.0 -google-cloud-bigquery-storage==2.0.0 +google-cloud-bigquery-storage==2.0.1 google-cloud-bigquery==2.2.0 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' From c4e4f4de9901c0822f81e29698dc4feb56449b73 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 12 Nov 2020 11:42:28 -0800 Subject: [PATCH 054/338] chore: add blacken to template (#96) Source-Author: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Nov 5 15:22:03 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 1f1148d3c7a7a52f0c98077f976bd9b3c948ee2b Source-Link: https://github.com/googleapis/synthtool/commit/1f1148d3c7a7a52f0c98077f976bd9b3c948ee2b Co-authored-by: Tim Swast --- bigquery_storage/quickstart/noxfile.py | 13 +++++++++++++ bigquery_storage/to_dataframe/noxfile.py | 13 +++++++++++++ 2 files changed, 26 insertions(+) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 9be2c922fd2..ab2c49227c3 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -147,6 +147,19 @@ def lint(session): session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session): + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 9be2c922fd2..ab2c49227c3 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -147,6 +147,19 @@ def lint(session): session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session): + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + # # Sample Tests # From b20033c77e828098efdedc6c4e0899525c808943 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 13 Nov 2020 01:34:02 +0100 Subject: [PATCH 055/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.1.0 (#98) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 83912611c23..6361f957fc8 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.0.1 +google-cloud-bigquery-storage==2.1.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 076b92032b9..f7fb919d582 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.23.0 -google-cloud-bigquery-storage==2.0.1 +google-cloud-bigquery-storage==2.1.0 google-cloud-bigquery==2.2.0 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' From 34929db726cec70b2a0bb3d7e7c1f4b9830ac21b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 16 Nov 2020 19:05:56 +0100 Subject: [PATCH 056/338] chore(deps): update dependency google-cloud-bigquery to v2.3.1 (#97) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index f7fb919d582..7542e362018 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.23.0 google-cloud-bigquery-storage==2.1.0 -google-cloud-bigquery==2.2.0 +google-cloud-bigquery==2.3.1 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 4c8bde52260f78b86527402709d3cd382b8dd3e7 Mon Sep 
17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 7 Dec 2020 23:43:02 +0100 Subject: [PATCH 057/338] chore(deps): update dependency google-cloud-bigquery to v2.5.0 (#100) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 7542e362018..0bc709e743b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.23.0 google-cloud-bigquery-storage==2.1.0 -google-cloud-bigquery==2.3.1 +google-cloud-bigquery==2.5.0 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 4c6463b196f2c0d7280a7ff5c0b2f6fc277531d4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 8 Dec 2020 04:17:56 +0100 Subject: [PATCH 058/338] chore(deps): update dependency google-cloud-bigquery to v2.6.0 (#103) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 0bc709e743b..71bf59f584f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.23.0 google-cloud-bigquery-storage==2.1.0 -google-cloud-bigquery==2.5.0 +google-cloud-bigquery==2.6.0 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 796ec947c15a92aafd40689c4841d45578551e72 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 5 Jan 2021 19:23:06 +0100 Subject: [PATCH 059/338] chore(deps): update dependency google-auth to v1.24.0 (#108) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 71bf59f584f..0d87ca535eb 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.23.0 +google-auth==1.24.0 google-cloud-bigquery-storage==2.1.0 google-cloud-bigquery==2.6.0 pyarrow==2.0.0 From 0510cc58a5fbffcd6c7ec14a65b5d46087b49f06 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 8 Jan 2021 21:07:51 +0100 Subject: [PATCH 060/338] chore(deps): update dependency google-cloud-bigquery to v2.6.1 (#109) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 0d87ca535eb..6c9dc681c34 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.24.0 google-cloud-bigquery-storage==2.1.0 -google-cloud-bigquery==2.6.0 +google-cloud-bigquery==2.6.1 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From a7be59d46c44916b53f17df7f117c4e1496c541c Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 12 Jan 2021 10:42:05 -0600 Subject: [PATCH 061/338] chore: remove manual workaround for response size validation (#112) * chore: remove manual workaround for response size validation * test: revert extras changes to fix 3.9 build --- bigquery_storage/quickstart/noxfile.py | 19 +++++++++++-------- bigquery_storage/to_dataframe/noxfile.py | 19 
+++++++++++-------- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index ab2c49227c3..bbd25fcdb5e 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -65,7 +66,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -94,7 +95,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -132,7 +133,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: @@ -153,7 +154,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,9 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +197,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +212,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -232,7 +235,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index ab2c49227c3..bbd25fcdb5e 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -65,7 +66,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -94,7 +95,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". 
This is used when running the linter to insure that import order is @@ -132,7 +133,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: @@ -153,7 +154,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,9 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +197,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +212,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -232,7 +235,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) From d4626e9034fdcd6be43d4442bd5d81b47b8aaaef Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 13 Jan 2021 15:33:59 -0600 Subject: [PATCH 062/338] docs: request only a single stream in dataframe example (#114) * docs: request only a single stream in dataframe example * blacken --- bigquery_storage/to_dataframe/main_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py index 4682057800d..bda6d601318 100644 --- a/bigquery_storage/to_dataframe/main_test.py +++ b/bigquery_storage/to_dataframe/main_test.py @@ -124,7 +124,7 @@ def test_session_to_dataframe(capsys, clients): read_options=read_options, ) read_session = bqstorageclient.create_read_session( - parent=parent, read_session=requested_session + parent=parent, read_session=requested_session, max_stream_count=1, ) # This example reads from only a single stream. 
Read from multiple streams From 6684e7c42fe0432f1ceb6507dc21b0c77199024d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 26 Jan 2021 19:51:14 +0100 Subject: [PATCH 063/338] chore(deps): update dependency google-cloud-bigquery to v2.6.2 (#123) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6c9dc681c34..9fe55dd70c0 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.24.0 google-cloud-bigquery-storage==2.1.0 -google-cloud-bigquery==2.6.1 +google-cloud-bigquery==2.6.2 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From babff4ff61a304ddd433aed64849e957cdb92ee0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 28 Jan 2021 23:02:44 +0100 Subject: [PATCH 064/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.2.1 (#126) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 6361f957fc8..ddc998aa1ca 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.1.0 +google-cloud-bigquery-storage==2.2.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 9fe55dd70c0..384a61f1099 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.24.0 -google-cloud-bigquery-storage==2.1.0 +google-cloud-bigquery-storage==2.2.1 google-cloud-bigquery==2.6.2 pyarrow==2.0.0 ipython==7.10.2; python_version > '3.0' From 3f9f11828ffd9658f38bb58643d6074cc0781ce2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 28 Jan 2021 23:24:23 +0100 Subject: [PATCH 065/338] chore(deps): update dependency pyarrow to v3 (#127) * chore(deps): update dependency pyarrow to v3 * update bigquery version Co-authored-by: Tim Swast --- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 384a61f1099..e6e46cdd23f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.24.0 google-cloud-bigquery-storage==2.2.1 -google-cloud-bigquery==2.6.2 -pyarrow==2.0.0 +google-cloud-bigquery==2.7.0 +pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From a3b841d765b6056e83b4435f8a1f208b62c3c31a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 8 Feb 2021 09:20:26 -0800 Subject: [PATCH 066/338] chore: include py.typed files in release (#138) chore(python): include py.typed files in release A py.typed file must be included in the released package for it to be considered typed by type checkers. https://www.python.org/dev/peps/pep-0561/#packaging-type-information. 
See https://github.com/googleapis/python-secret-manager/issues/79 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Fri Feb 5 17:32:06 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 33366574ffb9e11737b3547eb6f020ecae0536e8 Source-Link: https://github.com/googleapis/synthtool/commit/33366574ffb9e11737b3547eb6f020ecae0536e8 --- bigquery_storage/quickstart/noxfile.py | 2 +- bigquery_storage/to_dataframe/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index bbd25fcdb5e..f2320ea0001 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -82,7 +82,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index bbd25fcdb5e..f2320ea0001 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -82,7 +82,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From fca0d4551f9216cf49d70c30410df7f78f353a70 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Feb 2021 01:37:28 +0100 Subject: [PATCH 067/338] chore(deps): update dependency google-auth to v1.26.1 (#139) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index e6e46cdd23f..04553f64488 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.24.0 +google-auth==1.26.1 google-cloud-bigquery-storage==2.2.1 google-cloud-bigquery==2.7.0 pyarrow==3.0.0 From 438f0fbb746a9715047a0c5faa2e675dc1e76613 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Feb 2021 20:49:39 +0100 Subject: [PATCH 068/338] chore(deps): update dependency google-cloud-bigquery to v2.8.0 (#141) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 04553f64488..5d63a278ef1 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.26.1 google-cloud-bigquery-storage==2.2.1 -google-cloud-bigquery==2.7.0 +google-cloud-bigquery==2.8.0 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 2d5ae3e650f22ee4a3eca09ab57400c5c5028862 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Feb 2021 20:50:09 +0100 Subject: [PATCH 069/338] chore(deps): update dependency google-auth to v1.27.0 (#142) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5d63a278ef1..24a67b26ccb 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.26.1 +google-auth==1.27.0 google-cloud-bigquery-storage==2.2.1 google-cloud-bigquery==2.8.0 pyarrow==3.0.0 From e093c09c5eb8d167d3e3f2bff7f17138af9fd59f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 18 Feb 2021 23:09:22 +0100 Subject: [PATCH 070/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.3.0 (#145) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index ddc998aa1ca..9155b4b933b 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.2.1 +google-cloud-bigquery-storage==2.3.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 24a67b26ccb..ce88d3d7a03 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.27.0 -google-cloud-bigquery-storage==2.2.1 +google-cloud-bigquery-storage==2.3.0 google-cloud-bigquery==2.8.0 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' From 8a2bb9809031cef2b24ccf7dc1a5f86f79c7349b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Feb 2021 19:52:49 +0100 Subject: [PATCH 071/338] chore(deps): update dependency google-cloud-bigquery to v2.9.0 (#144) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ce88d3d7a03..074e0512598 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.27.0 google-cloud-bigquery-storage==2.3.0 -google-cloud-bigquery==2.8.0 +google-cloud-bigquery==2.9.0 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 5a8dda4f9108e2ca9e6335b4fda4a79f3d46095c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 25 Feb 2021 22:05:35 +0100 Subject: [PATCH 072/338] chore(deps): update dependency google-cloud-bigquery to v2.10.0 (#146) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 074e0512598..5cca9eb7836 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.27.0 google-cloud-bigquery-storage==2.3.0 -google-cloud-bigquery==2.9.0 +google-cloud-bigquery==2.10.0 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From b0acee2916ef0dc1567366679f7da58686bb8b91 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Mar 2021 20:58:05 +0100 Subject: [PATCH 073/338] chore(deps): update dependency google-auth to v1.27.1 (#150) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5cca9eb7836..0679da2ed92 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.27.0 +google-auth==1.27.1 google-cloud-bigquery-storage==2.3.0 google-cloud-bigquery==2.10.0 pyarrow==3.0.0 From b29090da056328e0675c220be161e12ac26f53cc Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 10 Mar 2021 21:42:28 +0100 Subject: [PATCH 074/338] chore(deps): update dependency google-cloud-bigquery to v2.11.0 (#152) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 0679da2ed92..5f4eaa3e7f8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.27.1 google-cloud-bigquery-storage==2.3.0 -google-cloud-bigquery==2.10.0 +google-cloud-bigquery==2.11.0 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From 9358726872eae56147551b5d0691f7b1bb7549cb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Mar 2021 15:15:56 +0100 Subject: [PATCH 075/338] chore(deps): update dependency google-cloud-bigquery to v2.12.0 (#157) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5f4eaa3e7f8..072ac02afe2 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.27.1 google-cloud-bigquery-storage==2.3.0 -google-cloud-bigquery==2.11.0 +google-cloud-bigquery==2.12.0 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From f79e951448597175d0448d25a1898eed6df7c9eb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Mar 2021 15:37:52 +0100 Subject: [PATCH 076/338] chore(deps): update dependency google-auth to v1.28.0 (#158) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 072ac02afe2..29a45256833 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.27.1 +google-auth==1.28.0 google-cloud-bigquery-storage==2.3.0 google-cloud-bigquery==2.12.0 pyarrow==3.0.0 From ec995a32bddd72d037cdadf3da2575501d4ce066 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 7 Apr 2021 17:34:43 +0200 Subject: [PATCH 077/338] chore(deps): update dependency google-cloud-bigquery to v2.13.1 (#163) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 29a45256833..c5bae2eb2a2 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.28.0 google-cloud-bigquery-storage==2.3.0 -google-cloud-bigquery==2.12.0 +google-cloud-bigquery==2.13.1 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' 
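The `max_stream_count=1` argument introduced in patch 062 above pairs with the 2.x `BigQueryReadClient` pinned in these requirements. A minimal sketch of that single-stream read pattern, with the project id as a placeholder:

```python
from google.cloud import bigquery_storage
from google.cloud.bigquery_storage import types

client = bigquery_storage.BigQueryReadClient()

requested_session = types.ReadSession(
    table="projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current",
    data_format=types.DataFormat.ARROW,
)
session = client.create_read_session(
    parent="projects/your-project-id",  # placeholder project
    read_session=requested_session,
    # One stream keeps the sample simple; use more for parallel reads.
    max_stream_count=1,
)

# Read everything from the single stream into a list of DataFrames.
reader = client.read_rows(session.streams[0].name)
frames = [page.to_dataframe() for page in reader.rows(session).pages]
```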
From 1f33907e722f292ac377a25989208b3d5959716a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 7 Apr 2021 21:51:37 +0200 Subject: [PATCH 078/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.4.0 (#174) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 9155b4b933b..f3466a917d3 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.3.0 +google-cloud-bigquery-storage==2.4.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c5bae2eb2a2..737ae3f0ac3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.28.0 -google-cloud-bigquery-storage==2.3.0 +google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.13.1 pyarrow==3.0.0 ipython==7.10.2; python_version > '3.0' From 1fd389195c6331a1c62f8c34746a80aedb0a15ca Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 10 Apr 2021 07:42:37 +0200 Subject: [PATCH 079/338] chore(deps): update dependency google-auth to v1.28.1 (#175) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 737ae3f0ac3..589b6a82d3b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.28.0 +google-auth==1.28.1 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.13.1 pyarrow==3.0.0 From 102e055d71b4c707d17c4e922da7c112e44d698e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 13 Apr 2021 08:16:03 -0700 Subject: [PATCH 080/338] chore: add constraints file check for python samples (#176) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/b7a528df-1b0b-42e0-a583-e53b45ee05fc/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) 
Source-Link: https://github.com/googleapis/synthtool/commit/0a071b3460344886297a304253bf924aa68ddb7e --- bigquery_storage/quickstart/noxfile.py | 10 ++++++++-- bigquery_storage/to_dataframe/noxfile.py | 10 ++++++++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index f2320ea0001..be1a3f25149 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -174,10 +174,16 @@ def _session_tests( ) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index f2320ea0001..be1a3f25149 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -174,10 +174,16 @@ def _session_tests( ) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) From d5283650f7f77bb5fa73ff3f40b0aa6752e16215 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Apr 2021 23:10:16 +0200 Subject: [PATCH 081/338] chore(deps): update dependency pytest to v6.2.3 (#177) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 7e460c8c866..f7e3ec09da6 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==6.0.1 +pytest==6.2.3 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 7e460c8c866..f7e3ec09da6 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==6.0.1 +pytest==6.2.3 From 275ffe99a87eae11a927093c98f72ee1bc324762 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 27 Apr 2021 16:38:03 +0200 Subject: [PATCH 082/338] chore(deps): update dependency pyarrow to v4 (#185) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | 
[pyarrow](https://arrow.apache.org/) | `==3.0.0` -> `==4.0.0` | [![age](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.0/compatibility-slim/3.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.0/confidence-slim/3.0.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 589b6a82d3b..af15e30e486 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.28.1 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.13.1 -pyarrow==3.0.0 +pyarrow==4.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 032a89baca2711d5721b2f6dbd78c7eb0a124407 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Apr 2021 17:30:11 +0200 Subject: [PATCH 083/338] chore(deps): update dependency google-auth to v1.30.0 (#181) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index af15e30e486..312b26cf0f6 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.28.1 +google-auth==1.30.0 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.13.1 pyarrow==4.0.0 From 7194505ce680d5c7eaef356a10b1b48c46069e23 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Apr 2021 17:42:02 +0200 Subject: [PATCH 084/338] chore(deps): update dependency google-cloud-bigquery to v2.14.0 (#184) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.13.1` -> `==2.14.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.14.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.14.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.14.0/compatibility-slim/2.13.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.14.0/confidence-slim/2.13.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery ### [`v2.14.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2140-httpswwwgithubcomgoogleapispython-bigquerycomparev2131v2140-2021-04-26) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.13.1...v2.14.0) ##### Features - accept DatasetListItem where DatasetReference is accepted ([#​597](https://www.github.com/googleapis/python-bigquery/issues/597)) ([c8b5581](https://www.github.com/googleapis/python-bigquery/commit/c8b5581ea3c94005d69755c4a3b5a0d8900f3fe2)) - accept job object as argument to `get_job` and `cancel_job` ([#​617](https://www.github.com/googleapis/python-bigquery/issues/617)) ([f75dcdf](https://www.github.com/googleapis/python-bigquery/commit/f75dcdf3943b87daba60011c9a3b42e34ff81910)) - add `Client.delete_job_metadata` method to remove job metadata ([#​610](https://www.github.com/googleapis/python-bigquery/issues/610)) ([0abb566](https://www.github.com/googleapis/python-bigquery/commit/0abb56669c097c59fbffce007c702e7a55f2d9c1)) - add `max_queue_size` argument to `RowIterator.to_dataframe_iterable` ([#​575](https://www.github.com/googleapis/python-bigquery/issues/575)) ([f95f415](https://www.github.com/googleapis/python-bigquery/commit/f95f415d3441b3928f6cc705cb8a75603d790fd6)) - add type hints for public methods ([#​613](https://www.github.com/googleapis/python-bigquery/issues/613)) ([f8d4aaa](https://www.github.com/googleapis/python-bigquery/commit/f8d4aaa335a0eef915e73596fc9b43b11d11be9f)) - DB API cursors are now iterable ([#​618](https://www.github.com/googleapis/python-bigquery/issues/618)) ([e0b373d](https://www.github.com/googleapis/python-bigquery/commit/e0b373d0e721a70656ed8faceb7f5c70f642d144)) - retry google.auth TransportError by default ([#​624](https://www.github.com/googleapis/python-bigquery/issues/624)) ([34ecc3f](https://www.github.com/googleapis/python-bigquery/commit/34ecc3f1ca0ff073330c0c605673d89b43af7ed9)) - use pyarrow stream compression, if available ([#​593](https://www.github.com/googleapis/python-bigquery/issues/593)) ([dde9dc5](https://www.github.com/googleapis/python-bigquery/commit/dde9dc5114c2311fb76fafc5b222fff561e8abf1)) ##### Bug Fixes - consistent percents handling in DB API query ([#​619](https://www.github.com/googleapis/python-bigquery/issues/619)) ([6502a60](https://www.github.com/googleapis/python-bigquery/commit/6502a602337ae562652a20b20270949f2c9d5073)) - missing license headers in new test files ([#​604](https://www.github.com/googleapis/python-bigquery/issues/604)) ([df48cc5](https://www.github.com/googleapis/python-bigquery/commit/df48cc5a0be99ad39d5835652d1b7422209afc5d)) - unsetting clustering fields on Table is now possible ([#​622](https://www.github.com/googleapis/python-bigquery/issues/622)) ([33a871f](https://www.github.com/googleapis/python-bigquery/commit/33a871f06329f9bf5a6a92fab9ead65bf2bee75d)) ##### Documentation - add sample to run DML query ([#​591](https://www.github.com/googleapis/python-bigquery/issues/591)) ([ff2ec3a](https://www.github.com/googleapis/python-bigquery/commit/ff2ec3abe418a443cd07751c08e654f94e8b3155)) - update the description of the return value of `_QueryResults.rows()` ([#​594](https://www.github.com/googleapis/python-bigquery/issues/594)) ([8f4c0b8](https://www.github.com/googleapis/python-bigquery/commit/8f4c0b84dac3840532d7865247b8ad94b625b897)) ##### [2.13.1](https://www.github.com/googleapis/python-bigquery/compare/v2.13.0...v2.13.1) (2021-03-23) ##### Bug Fixes - add ConnectionError to default retry 
([#​571](https://www.github.com/googleapis/python-bigquery/issues/571)) ([a3edb8b](https://www.github.com/googleapis/python-bigquery/commit/a3edb8b921e029e2c03d33302d408ad5d4e9d4ad))
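Among the 2.14.0 features above, the new `max_queue_size` argument caps how many result pages the background thread buffers. A minimal sketch under that reading, with a placeholder query:

```python
from google.cloud import bigquery

client = bigquery.Client()
row_iterator = client.query(
    "SELECT name, number"
    " FROM `bigquery-public-data.usa_names.usa_1910_current`"
    " LIMIT 1000"
).result()

# Each iteration yields one DataFrame per result page; a small queue size
# bounds how many pages are held in memory at once.
for frame in row_iterator.to_dataframe_iterable(max_queue_size=2):
    print(len(frame.index))
```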
--- ### Configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 312b26cf0f6..da436abc4e1 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.30.0 google-cloud-bigquery-storage==2.4.0 -google-cloud-bigquery==2.13.1 +google-cloud-bigquery==2.14.0 pyarrow==4.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From d5f5e3f5c2eac9e832c4cb0bfc2ba474fa3a7fa6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 May 2021 15:12:02 +0200 Subject: [PATCH 085/338] chore(deps): update dependency pytest to v6.2.4 (#190) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==6.2.3` -> `==6.2.4` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/compatibility-slim/6.2.3)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/confidence-slim/6.2.3)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pytest-dev/pytest

### [`v6.2.4`](https://togithub.com/pytest-dev/pytest/releases/6.2.4)

[Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.3...6.2.4)

# pytest 6.2.4 (2021-05-04)

## Bug Fixes

- [#​8539](https://togithub.com/pytest-dev/pytest/issues/8539): Fixed assertion rewriting on Python 3.10.
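For context on the fix above: assertion rewriting is what turns a plain `assert` in a test module into a detailed failure report. A minimal, self-contained illustration (hypothetical file name, not part of this repository):

```python
# test_example.py -- run with `pytest test_example.py`.
# On failure, the rewritten assertion reports both operands,
# e.g. the computed sum alongside the expected value.
def test_sum():
    values = [1, 2, 3]
    assert sum(values) == 6
```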
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index f7e3ec09da6..95ea1e6a02b 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.3 +pytest==6.2.4 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index f7e3ec09da6..95ea1e6a02b 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.3 +pytest==6.2.4 From aacfd60f57fd1d08c73c8c32141b5ce7c4eefcc2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 May 2021 15:14:02 +0200 Subject: [PATCH 086/338] chore(deps): update dependency google-cloud-bigquery to v2.16.1 (#189) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.14.0` -> `==2.16.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.16.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.16.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.16.1/compatibility-slim/2.14.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.16.1/confidence-slim/2.14.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery

### [`v2.16.1`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2161-httpswwwgithubcomgoogleapispython-bigquerycomparev2160v2161-2021-05-12)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.16.0...v2.16.1)

### [`v2.16.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2160-httpswwwgithubcomgoogleapispython-bigquerycomparev2150v2160-2021-05-05)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.15.0...v2.16.0)

##### Features

- add with_name() to ScalarQueryParameterType ([#​644](https://www.github.com/googleapis/python-bigquery/issues/644)) ([6cc6876](https://www.github.com/googleapis/python-bigquery/commit/6cc6876eb0e5bf49fdc047256a945dcf1b289576))

##### Dependencies

- expand supported pyarrow versions to v4 ([#​643](https://www.github.com/googleapis/python-bigquery/issues/643)) ([9e1d386](https://www.github.com/googleapis/python-bigquery/commit/9e1d3869c2024fe7a8af57ff59838d904ca5db03))

### [`v2.15.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2150-httpswwwgithubcomgoogleapispython-bigquerycomparev2140v2150-2021-04-29)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.14.0...v2.15.0)

##### Features

- Extended DB API parameter syntax to optionally provide parameter types ([#​626](https://www.github.com/googleapis/python-bigquery/issues/626)) ([8bcf397](https://www.github.com/googleapis/python-bigquery/commit/8bcf397fbe2527e06317741875a059b109cfcd9c))

##### Bug Fixes

- add DECIMAL and BIGDECIMAL as aliases for NUMERIC and BIGNUMERIC ([#​638](https://www.github.com/googleapis/python-bigquery/issues/638)) ([aa59023](https://www.github.com/googleapis/python-bigquery/commit/aa59023317b1c63720fb717b3544f755652da58d))
- The DB API Binary function accepts bytes data ([#​630](https://www.github.com/googleapis/python-bigquery/issues/630)) ([4396e70](https://www.github.com/googleapis/python-bigquery/commit/4396e70771af6889d3242c37c5ff2e80241023a2))
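A minimal sketch of the extended DB API parameter syntax from v2.15.0, assuming the `%(name:TYPE)s` spelling documented for this feature; the table name and filter are hypothetical:

```python
from google.cloud.bigquery import dbapi

connection = dbapi.connect()  # application default credentials
cursor = connection.cursor()

# The %(name:TYPE)s placeholder pins the BigQuery type explicitly rather
# than letting the client infer it from the Python value.
cursor.execute(
    "SELECT name FROM `my-project.my_dataset.my_table` "  # hypothetical table
    "WHERE age = %(age:INT64)s",
    {"age": 30},
)
print(cursor.fetchall())
```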
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index da436abc4e1..6dae7aba3f1 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.30.0 google-cloud-bigquery-storage==2.4.0 -google-cloud-bigquery==2.14.0 +google-cloud-bigquery==2.16.1 pyarrow==4.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From fa260af380b345c7c936b11eeb77125f61fd3841 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 22 May 2021 12:34:04 +0200 Subject: [PATCH 087/338] chore(deps): update dependency google-cloud-bigquery to v2.17.0 (#199) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.16.1` -> `==2.17.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.17.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.17.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.17.0/compatibility-slim/2.16.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.17.0/confidence-slim/2.16.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery

### [`v2.17.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2170-httpswwwgithubcomgoogleapispython-bigquerycomparev2161v2170-2021-05-21)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.16.1...v2.17.0)

##### Features

- detect obsolete BQ Storage extra at runtime ([#​666](https://www.github.com/googleapis/python-bigquery/issues/666)) ([bd7dbda](https://www.github.com/googleapis/python-bigquery/commit/bd7dbdae5c972b16bafc53c67911eeaa3255a880))
- Support parameterized NUMERIC, BIGNUMERIC, STRING, and BYTES types ([#​673](https://www.github.com/googleapis/python-bigquery/issues/673)) ([45421e7](https://www.github.com/googleapis/python-bigquery/commit/45421e73bfcddb244822e6a5cd43be6bd1ca2256))

##### Bug Fixes

- **tests:** invalid path to strptime() ([#​672](https://www.github.com/googleapis/python-bigquery/issues/672)) ([591cdd8](https://www.github.com/googleapis/python-bigquery/commit/591cdd851bb1321b048a05a378a0ef48d3ade462))

##### [2.16.1](https://www.github.com/googleapis/python-bigquery/compare/v2.16.0...v2.16.1) (2021-05-12)

##### Bug Fixes

- executemany rowcount only reflected the last execution ([#​660](https://www.github.com/googleapis/python-bigquery/issues/660)) ([aeadc8c](https://www.github.com/googleapis/python-bigquery/commit/aeadc8c2d614bb9f0883ec901fca48930f3aaf19))
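A sketch of how the parameterized types from v2.17.0 appear in schema definitions; the project, dataset, and table names are placeholders, and the `max_length`/`precision`/`scale` keywords are assumptions drawn from the library's documentation for this feature:

```python
from google.cloud import bigquery

client = bigquery.Client()

# Assumed keywords: max_length carries the STRING/BYTES type parameter,
# precision and scale carry the NUMERIC/BIGNUMERIC ones.
schema = [
    bigquery.SchemaField("code", "STRING", max_length=10),
    bigquery.SchemaField("amount", "NUMERIC", precision=10, scale=2),
]
table = bigquery.Table("my-project.my_dataset.my_table", schema=schema)
table = client.create_table(table)  # hypothetical project and dataset
```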
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6dae7aba3f1..65a5f01e059 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.30.0 google-cloud-bigquery-storage==2.4.0 -google-cloud-bigquery==2.16.1 +google-cloud-bigquery==2.17.0 pyarrow==4.0.0 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From b52edbda9c90ca547b1f22342925837d4dbeffeb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 28 May 2021 16:34:06 +0000 Subject: [PATCH 088/338] chore: new owl bot post processor docker image (#202) gcr.io/repo-automation-bots/owlbot-python:latest@sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 --- bigquery_storage/quickstart/noxfile.py | 9 ++++++++- bigquery_storage/to_dataframe/noxfile.py | 9 ++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index be1a3f25149..160fe728648 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -48,6 +48,10 @@ # to use your own Cloud project. "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, @@ -172,6 +176,9 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): @@ -198,7 +205,7 @@ def _session_tests( # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index be1a3f25149..160fe728648 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -48,6 +48,10 @@ # to use your own Cloud project. 
"gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, @@ -172,6 +176,9 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): @@ -198,7 +205,7 @@ def _session_tests( # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) From 8d9ed94cb769bc469f0c3be343f66e1386eb4944 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Jun 2021 13:30:02 +0200 Subject: [PATCH 089/338] chore(deps): update dependency pyarrow to v4.0.1 (#208) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [pyarrow](https://arrow.apache.org/) | `==4.0.0` -> `==4.0.1` | [![age](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.1/compatibility-slim/4.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pyarrow/4.0.1/confidence-slim/4.0.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). 
--- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 65a5f01e059..38501de00f6 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==1.30.0 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.17.0 -pyarrow==4.0.0 +pyarrow==4.0.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' pandas==0.25.3; python_version > '3.0' From 36896381a58f99f4c2eb44f131055c66974ec029 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Jun 2021 13:31:14 +0200 Subject: [PATCH 090/338] chore(deps): update dependency google-auth to v1.31.0 (#204) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 38501de00f6..1a15bb32d42 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.30.0 +google-auth==1.31.0 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.17.0 pyarrow==4.0.1 From 4f1f2e2b299ad544cef2d0a9258c7799ad5787aa Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Jun 2021 13:44:02 +0200 Subject: [PATCH 091/338] chore(deps): update dependency google-cloud-bigquery to v2.20.0 (#209) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.17.0` -> `==2.20.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.20.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.20.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.20.0/compatibility-slim/2.17.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.20.0/confidence-slim/2.17.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery

### [`v2.20.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2200-httpswwwgithubcomgoogleapispython-bigquerycomparev2190v2200-2021-06-07)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.19.0...v2.20.0)

##### Features

- support script options in query job config ([#​690](https://www.github.com/googleapis/python-bigquery/issues/690)) ([1259e16](https://www.github.com/googleapis/python-bigquery/commit/1259e16394784315368e8be959c1ac097782b62e))

### [`v2.19.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2190-httpswwwgithubcomgoogleapispython-bigquerycomparev2180v2190-2021-06-06)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.18.0...v2.19.0)

##### Features

- list_tables, list_projects, list_datasets, list_models, list_routines, and list_jobs now accept a page_size parameter to control page size ([#​686](https://www.github.com/googleapis/python-bigquery/issues/686)) ([1f1c4b7](https://www.github.com/googleapis/python-bigquery/commit/1f1c4b7ba4390fc4c5c8186bc22b83b45304ca06))

### [`v2.18.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2180-httpswwwgithubcomgoogleapispython-bigquerycomparev2170v2180-2021-06-02)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.17.0...v2.18.0)

##### Features

- add support for Parquet options ([#​679](https://www.github.com/googleapis/python-bigquery/issues/679)) ([d792ce0](https://www.github.com/googleapis/python-bigquery/commit/d792ce09388a6ee3706777915dd2818d4c854f79))
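The `page_size` parameter from v2.19.0 in a minimal sketch, assuming application default credentials; the dataset ID is a placeholder:

```python
from google.cloud import bigquery

client = bigquery.Client()

# As of google-cloud-bigquery 2.19.0, the list_* methods accept page_size
# to bound how many results each underlying API page returns.
for table in client.list_tables("my_dataset", page_size=50):  # placeholder dataset
    print(table.table_id)
```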
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 1a15bb32d42..7e1008ebcff 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.31.0 google-cloud-bigquery-storage==2.4.0 -google-cloud-bigquery==2.17.0 +google-cloud-bigquery==2.20.0 pyarrow==4.0.1 ipython==7.10.2; python_version > '3.0' ipython==5.9.0; python_version < '3.0' From d099309436d258f652a242b81c60140eb19d50ce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 26 Jun 2021 01:12:24 +0200 Subject: [PATCH 092/338] chore(deps): update dependency google-auth to v1.32.0 (#215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | `==1.31.0` -> `==1.32.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.0/compatibility-slim/1.31.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.0/confidence-slim/1.31.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-auth-library-python

### [`v1.32.0`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1320-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1310v1320-2021-06-16)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.31.0...v1.32.0)

##### Features

- allow scopes for self signed jwt ([#​776](https://www.github.com/googleapis/google-auth-library-python/issues/776)) ([2cfe655](https://www.github.com/googleapis/google-auth-library-python/commit/2cfe655bba837170abc07701557a1a5e0fe3294e))
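A brief sketch of scoped service-account credentials, the setup the self-signed-JWT scope feature above applies to; the key path is a placeholder:

```python
from google.oauth2 import service_account

# Placeholder key path. With google-auth >= 1.32.0, requested scopes can
# also be carried into self-signed JWTs, avoiding a round trip to the
# OAuth token endpoint in that flow.
credentials = service_account.Credentials.from_service_account_file(
    "/path/to/key.json",
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
print(credentials.service_account_email)
```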
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 7e1008ebcff..455e6894ed9 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.31.0 +google-auth==1.32.0 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.20.0 pyarrow==4.0.1 From aefbf499bd0503f06be081a36c2d6185a3103b8d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 1 Jul 2021 04:10:08 +0200 Subject: [PATCH 093/338] chore(deps): update dependency google-auth to v1.32.1 (#226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | `==1.32.0` -> `==1.32.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.1/compatibility-slim/1.32.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-auth/1.32.1/confidence-slim/1.32.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-auth-library-python

### [`v1.32.1`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1321-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1320v1321-2021-06-30)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.32.0...v1.32.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 455e6894ed9..7ce4603b8a2 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.32.0 +google-auth==1.32.1 google-cloud-bigquery-storage==2.4.0 google-cloud-bigquery==2.20.0 pyarrow==4.0.1 From c55828e3d6a082a88a982e377cb26a983623431b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 1 Jul 2021 17:03:08 +0200 Subject: [PATCH 094/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.5.0 (#222) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index f3466a917d3..9eddd04697e 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.4.0 +google-cloud-bigquery-storage==2.5.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 7ce4603b8a2..45aeaa2ff88 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.32.1 -google-cloud-bigquery-storage==2.4.0 +google-cloud-bigquery-storage==2.5.0 google-cloud-bigquery==2.20.0 pyarrow==4.0.1 ipython==7.10.2; python_version > '3.0' From b11fc7eed0ed9dd6463e4060bcae5ca34dbbdae1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 9 Jul 2021 22:03:46 +0200 Subject: [PATCH 095/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.6.0 (#231) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 9eddd04697e..ca0693356fb 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.5.0 +google-cloud-bigquery-storage==2.6.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 45aeaa2ff88..894fd023343 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==1.32.1 -google-cloud-bigquery-storage==2.5.0 +google-cloud-bigquery-storage==2.6.0 google-cloud-bigquery==2.20.0 pyarrow==4.0.1 ipython==7.10.2; python_version > '3.0' From 520eaf9859ff6dbe6f04ba204aec00143979cde9 Mon Sep 17 00:00:00 
2001 From: Tim Swast Date: Tue, 13 Jul 2021 14:11:14 -0500 Subject: [PATCH 096/338] docs: pandas DataFrame samples are more standalone (#224) * docs: pandas DataFrame samples are more standalone * fix region tag * fix region tag * remove unused imports * blacken * remove session from call to rows/to_dataframe --- bigquery_storage/conftest.py | 22 ++++++ .../quickstart/quickstart_test.py | 8 --- bigquery_storage/to_dataframe/noxfile.py | 2 +- .../to_dataframe/read_query_results.py | 47 +++++++++++++ .../to_dataframe/read_query_results_test.py | 21 ++++++ .../to_dataframe/read_table_bigquery.py | 42 +++++++++++ .../to_dataframe/read_table_bigquery_test.py | 21 ++++++ .../to_dataframe/read_table_bqstorage.py | 69 +++++++++++++++++++ .../to_dataframe/read_table_bqstorage_test.py | 21 ++++++ .../to_dataframe/requirements.txt | 8 +-- 10 files changed, 248 insertions(+), 13 deletions(-) create mode 100644 bigquery_storage/conftest.py create mode 100644 bigquery_storage/to_dataframe/read_query_results.py create mode 100644 bigquery_storage/to_dataframe/read_query_results_test.py create mode 100644 bigquery_storage/to_dataframe/read_table_bigquery.py create mode 100644 bigquery_storage/to_dataframe/read_table_bigquery_test.py create mode 100644 bigquery_storage/to_dataframe/read_table_bqstorage.py create mode 100644 bigquery_storage/to_dataframe/read_table_bqstorage_test.py diff --git a/bigquery_storage/conftest.py b/bigquery_storage/conftest.py new file mode 100644 index 00000000000..92068ef539c --- /dev/null +++ b/bigquery_storage/conftest.py @@ -0,0 +1,22 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["GOOGLE_CLOUD_PROJECT"] diff --git a/bigquery_storage/quickstart/quickstart_test.py b/bigquery_storage/quickstart/quickstart_test.py index 23f3c350413..8e1e0dfdef5 100644 --- a/bigquery_storage/quickstart/quickstart_test.py +++ b/bigquery_storage/quickstart/quickstart_test.py @@ -13,9 +13,6 @@ # limitations under the License. import datetime -import os - -import pytest from . import quickstart @@ -27,11 +24,6 @@ def now_millis(): ) -@pytest.fixture() -def project_id(): - return os.environ["GOOGLE_CLOUD_PROJECT"] - - def test_quickstart_wo_snapshot(capsys, project_id): quickstart.main(project_id) out, _ = capsys.readouterr() diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 160fe728648..b3c8658a3a7 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -226,7 +226,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/bigquery_storage/to_dataframe/read_query_results.py b/bigquery_storage/to_dataframe/read_query_results.py new file mode 100644 index 00000000000..45bae1eac44 --- /dev/null +++ b/bigquery_storage/to_dataframe/read_query_results.py @@ -0,0 +1,47 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def read_query_results(): + # [START bigquerystorage_pandas_tutorial_read_query_results] + from google.cloud import bigquery + + bqclient = bigquery.Client() + + # Download query results. + query_string = """ + SELECT + CONCAT( + 'https://stackoverflow.com/questions/', + CAST(id as STRING)) as url, + view_count + FROM `bigquery-public-data.stackoverflow.posts_questions` + WHERE tags like '%google-bigquery%' + ORDER BY view_count DESC + """ + + dataframe = ( + bqclient.query(query_string) + .result() + .to_dataframe( + # Optionally, explicitly request to use the BigQuery Storage API. As of + # google-cloud-bigquery version 1.26.0 and above, the BigQuery Storage + # API is used by default. + create_bqstorage_client=True, + ) + ) + print(dataframe.head()) + # [END bigquerystorage_pandas_tutorial_read_query_results] + + return dataframe diff --git a/bigquery_storage/to_dataframe/read_query_results_test.py b/bigquery_storage/to_dataframe/read_query_results_test.py new file mode 100644 index 00000000000..55b55a08235 --- /dev/null +++ b/bigquery_storage/to_dataframe/read_query_results_test.py @@ -0,0 +1,21 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import read_query_results + + +def test_read_query_results(capsys): + read_query_results.read_query_results() + out, _ = capsys.readouterr() + assert "stackoverflow" in out diff --git a/bigquery_storage/to_dataframe/read_table_bigquery.py b/bigquery_storage/to_dataframe/read_table_bigquery.py new file mode 100644 index 00000000000..82d8879b1c8 --- /dev/null +++ b/bigquery_storage/to_dataframe/read_table_bigquery.py @@ -0,0 +1,42 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +def read_table(): + # [START bigquerystorage_pandas_tutorial_read_table] + from google.cloud import bigquery + + bqclient = bigquery.Client() + + # Download a table. + table = bigquery.TableReference.from_string( + "bigquery-public-data.utility_us.country_code_iso" + ) + rows = bqclient.list_rows( + table, + selected_fields=[ + bigquery.SchemaField("country_name", "STRING"), + bigquery.SchemaField("fips_code", "STRING"), + ], + ) + dataframe = rows.to_dataframe( + # Optionally, explicitly request to use the BigQuery Storage API. As of + # google-cloud-bigquery version 1.26.0 and above, the BigQuery Storage + # API is used by default. + create_bqstorage_client=True, + ) + print(dataframe.head()) + # [END bigquerystorage_pandas_tutorial_read_table] + + return dataframe diff --git a/bigquery_storage/to_dataframe/read_table_bigquery_test.py b/bigquery_storage/to_dataframe/read_table_bigquery_test.py new file mode 100644 index 00000000000..c8301857108 --- /dev/null +++ b/bigquery_storage/to_dataframe/read_table_bigquery_test.py @@ -0,0 +1,21 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import read_table_bigquery + + +def test_read_table(capsys): + read_table_bigquery.read_table() + out, _ = capsys.readouterr() + assert "country_name" in out diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage.py b/bigquery_storage/to_dataframe/read_table_bqstorage.py new file mode 100644 index 00000000000..0a3ae777867 --- /dev/null +++ b/bigquery_storage/to_dataframe/read_table_bqstorage.py @@ -0,0 +1,69 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def read_table(your_project_id): + original_your_project_id = your_project_id + # [START bigquerystorage_pandas_tutorial_read_session] + your_project_id = "project-for-read-session" + # [END bigquerystorage_pandas_tutorial_read_session] + your_project_id = original_your_project_id + + # [START bigquerystorage_pandas_tutorial_read_session] + from google.cloud import bigquery_storage + from google.cloud.bigquery_storage import types + import pandas + + bqstorageclient = bigquery_storage.BigQueryReadClient() + + project_id = "bigquery-public-data" + dataset_id = "new_york_trees" + table_id = "tree_species" + table = f"projects/{project_id}/datasets/{dataset_id}/tables/{table_id}" + + # Select columns to read with read options. 
If no read options are + # specified, the whole table is read. + read_options = types.ReadSession.TableReadOptions( + selected_fields=["species_common_name", "fall_color"] + ) + + parent = "projects/{}".format(your_project_id) + + requested_session = types.ReadSession( + table=table, + # Avro is also supported, but the Arrow data format is optimized to + # work well with column-oriented data structures such as pandas + # DataFrames. + data_format=types.DataFormat.ARROW, + read_options=read_options, + ) + read_session = bqstorageclient.create_read_session( + parent=parent, read_session=requested_session, max_stream_count=1, + ) + + # This example reads from only a single stream. Read from multiple streams + # to fetch data faster. Note that the session may not contain any streams + # if there are no rows to read. + stream = read_session.streams[0] + reader = bqstorageclient.read_rows(stream.name) + + # Parse all Arrow blocks and create a dataframe. + frames = [] + for message in reader.rows().pages: + frames.append(message.to_dataframe()) + dataframe = pandas.concat(frames) + print(dataframe.head()) + # [END bigquerystorage_pandas_tutorial_read_session] + + return dataframe diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage_test.py b/bigquery_storage/to_dataframe/read_table_bqstorage_test.py new file mode 100644 index 00000000000..cc09307836c --- /dev/null +++ b/bigquery_storage/to_dataframe/read_table_bqstorage_test.py @@ -0,0 +1,21 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . 
import read_table_bqstorage + + +def test_read_table(capsys, project_id): + read_table_bqstorage.read_table(your_project_id=project_id) + out, _ = capsys.readouterr() + assert "species_common_name" in out diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 894fd023343..ff4e18a845a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -2,7 +2,7 @@ google-auth==1.32.1 google-cloud-bigquery-storage==2.6.0 google-cloud-bigquery==2.20.0 pyarrow==4.0.1 -ipython==7.10.2; python_version > '3.0' -ipython==5.9.0; python_version < '3.0' -pandas==0.25.3; python_version > '3.0' -pandas==0.24.2; python_version < '3.0' +ipython==7.24.0; python_version > '3.6' +ipython==7.16.1; python_version <= '3.6' +pandas==1.2.5; python_version > '3.6' +pandas==1.1.5; python_version <= '3.6' From a97acda0070fefa13a346b7a767413eb62342ff5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 14 Jul 2021 15:22:14 +0000 Subject: [PATCH 097/338] build(python): exit with success status if no samples found (#234) Source-Link: https://github.com/googleapis/synthtool/commit/53ea3896a52f87c758e79b5a19fa338c83925a98 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:e1793a23ae0ee9aafb2e3a53b564a351f74790dbe3c2d75f8fc3b8c43e5c036c --- bigquery_storage/to_dataframe/noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index b3c8658a3a7..160fe728648 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -226,7 +226,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" + """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): From 99ba8273cb5757aaf623e68d2d93d7043581b820 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Jul 2021 23:22:27 +0200 Subject: [PATCH 098/338] chore(deps): update dependency google-auth to v1.33.0 (#236) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | `==1.32.1` -> `==1.33.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-auth/1.33.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-auth/1.33.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-auth/1.33.0/compatibility-slim/1.32.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-auth/1.33.0/confidence-slim/1.32.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-auth-library-python

### [`v1.33.0`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​1330-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev1321v1330-2021-07-14)

[Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v1.32.1...v1.33.0)

##### Features

- define `CredentialAccessBoundary` classes ([#​793](https://www.github.com/googleapis/google-auth-library-python/issues/793)) ([d883921](https://www.github.com/googleapis/google-auth-library-python/commit/d883921ae8fdc92b2c2cf1b3a5cd389e1287eb60))
- define `google.auth.downscoped.Credentials` class ([#​801](https://www.github.com/googleapis/google-auth-library-python/issues/801)) ([2f5c3a6](https://www.github.com/googleapis/google-auth-library-python/commit/2f5c3a636192c20cf4c92c3831d1f485031d24d2))
- service account is able to use a private token endpoint ([#​784](https://www.github.com/googleapis/google-auth-library-python/issues/784)) ([0e26409](https://www.github.com/googleapis/google-auth-library-python/commit/0e264092e35ac02ad68d5d91424ecba5397daa41))

##### Bug Fixes

- fix fetch_id_token credential lookup order to match adc ([#​748](https://www.github.com/googleapis/google-auth-library-python/issues/748)) ([c34452e](https://www.github.com/googleapis/google-auth-library-python/commit/c34452ef450c42cfef37a1b0c548bb422302dd5d))

##### Documentation

- fix code block formatting in 'user-guide.rst' ([#​794](https://www.github.com/googleapis/google-auth-library-python/issues/794)) ([4fd84bd](https://www.github.com/googleapis/google-auth-library-python/commit/4fd84bdf43694af5107dc8c8b443c06ba2f61d2c))

##### [1.32.1](https://www.github.com/googleapis/google-auth-library-python/compare/v1.32.0...v1.32.1) (2021-06-30)

##### Bug Fixes

- avoid leaking sub-session created for '\_auth_request' ([#​789](https://www.github.com/googleapis/google-auth-library-python/issues/789)) ([2079ab5](https://www.github.com/googleapis/google-auth-library-python/commit/2079ab5e1db464f502248ae4f9e424deeef87fb2))
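A sketch of the new downscoped-credentials API using the class names from this changelog; the bucket, role, and keyword arguments are assumptions drawn from the google-auth documentation rather than from this document:

```python
import google.auth
from google.auth import downscoped

# Assumption: application default credentials act as the source credentials.
source_credentials, _ = google.auth.default()

# Placeholder bucket and role; the rule restricts what the downscoped
# token may access and which permissions it may exercise.
rule = downscoped.AccessBoundaryRule(
    available_resource="//storage.googleapis.com/projects/_/buckets/example-bucket",
    available_permissions=["inRole:roles/storage.objectViewer"],
)
boundary = downscoped.CredentialAccessBoundary(rules=[rule])

# The resulting credentials can do no more than the boundary allows.
credentials = downscoped.Credentials(
    source_credentials=source_credentials,
    credential_access_boundary=boundary,
)
```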
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ff4e18a845a..70d8a53b837 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.32.1 +google-auth==1.33.0 google-cloud-bigquery-storage==2.6.0 google-cloud-bigquery==2.20.0 pyarrow==4.0.1 From 07c0feb3f4d4d50093e54a8793caf48ea2b06b06 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 16 Jul 2021 17:33:19 +0200 Subject: [PATCH 099/338] chore(deps): update dependency google-cloud-bigquery to v2.21.0 (#232) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 70d8a53b837..142d49423fc 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.33.0 google-cloud-bigquery-storage==2.6.0 -google-cloud-bigquery==2.20.0 +google-cloud-bigquery==2.21.0 pyarrow==4.0.1 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From 1d8ccd108af3acd066f4aedd0c706694265c72c9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Jul 2021 13:48:33 +0000 Subject: [PATCH 100/338] feat: add Samples section to CONTRIBUTING.rst (#241) Source-Link: https://github.com/googleapis/synthtool/commit/52e4e46eff2a0b70e3ff5506a02929d089d077d4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 --- bigquery_storage/quickstart/noxfile.py | 5 +++-- bigquery_storage/to_dataframe/noxfile.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 160fe728648..9fc7f17820d 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -28,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. 
The values here should be in sync @@ -159,7 +160,7 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: - session.install("black") + session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 160fe728648..9fc7f17820d 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -28,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -159,7 +160,7 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: - session.install("black") + session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) From c3ffaf08ab17382ecf1a83313c5bbb1bfa754656 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 27 Jul 2021 17:48:30 +0200 Subject: [PATCH 101/338] chore(deps): update dependency google-cloud-bigquery to v2.22.1 (#239) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 142d49423fc..b44473141ec 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==1.33.0 google-cloud-bigquery-storage==2.6.0 -google-cloud-bigquery==2.21.0 +google-cloud-bigquery==2.22.1 pyarrow==4.0.1 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From dab9b532de1a600a36728b57894023a511e78287 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 27 Jul 2021 17:48:51 +0200 Subject: [PATCH 102/338] chore(deps): update dependency google-auth to v1.33.1 (#243) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index b44473141ec..36cdaeda1bd 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==1.33.0 +google-auth==1.33.1 google-cloud-bigquery-storage==2.6.0 google-cloud-bigquery==2.22.1 pyarrow==4.0.1 From 5b7d0bb29704cf67b73edb61d5aada17e4739514 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 27 Jul 2021 10:50:22 -0500 Subject: [PATCH 103/338] docs: remove duplicate code samples (#246) --- bigquery_storage/to_dataframe/main_test.py | 144 --------------------- 1 file changed, 144 deletions(-) delete mode 100644 bigquery_storage/to_dataframe/main_test.py diff --git a/bigquery_storage/to_dataframe/main_test.py b/bigquery_storage/to_dataframe/main_test.py deleted file mode 100644 index bda6d601318..00000000000 --- a/bigquery_storage/to_dataframe/main_test.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest - - -@pytest.fixture -def clients(): - # [START bigquerystorage_pandas_tutorial_all] - # [START bigquerystorage_pandas_tutorial_create_client] - import google.auth - from google.cloud import bigquery - from google.cloud import bigquery_storage - - # Explicitly create a credentials object. This allows you to use the same - # credentials for both the BigQuery and BigQuery Storage clients, avoiding - # unnecessary API calls to fetch duplicate authentication tokens. - credentials, your_project_id = google.auth.default( - scopes=["https://www.googleapis.com/auth/cloud-platform"] - ) - - # Make clients. - bqclient = bigquery.Client(credentials=credentials, project=your_project_id,) - bqstorageclient = bigquery_storage.BigQueryReadClient(credentials=credentials) - # [END bigquerystorage_pandas_tutorial_create_client] - # [END bigquerystorage_pandas_tutorial_all] - return bqclient, bqstorageclient - - -def test_table_to_dataframe(capsys, clients): - from google.cloud import bigquery - - bqclient, bqstorageclient = clients - - # [START bigquerystorage_pandas_tutorial_all] - # [START bigquerystorage_pandas_tutorial_read_table] - # Download a table. - table = bigquery.TableReference.from_string( - "bigquery-public-data.utility_us.country_code_iso" - ) - rows = bqclient.list_rows( - table, - selected_fields=[ - bigquery.SchemaField("country_name", "STRING"), - bigquery.SchemaField("fips_code", "STRING"), - ], - ) - dataframe = rows.to_dataframe(bqstorage_client=bqstorageclient) - print(dataframe.head()) - # [END bigquerystorage_pandas_tutorial_read_table] - # [END bigquerystorage_pandas_tutorial_all] - - out, _ = capsys.readouterr() - assert "country_name" in out - - -def test_query_to_dataframe(capsys, clients): - bqclient, bqstorageclient = clients - - # [START bigquerystorage_pandas_tutorial_all] - # [START bigquerystorage_pandas_tutorial_read_query_results] - # Download query results. - query_string = """ - SELECT - CONCAT( - 'https://stackoverflow.com/questions/', - CAST(id as STRING)) as url, - view_count - FROM `bigquery-public-data.stackoverflow.posts_questions` - WHERE tags like '%google-bigquery%' - ORDER BY view_count DESC - """ - - dataframe = ( - bqclient.query(query_string) - .result() - .to_dataframe(bqstorage_client=bqstorageclient) - ) - print(dataframe.head()) - # [END bigquerystorage_pandas_tutorial_read_query_results] - # [END bigquerystorage_pandas_tutorial_all] - - out, _ = capsys.readouterr() - assert "stackoverflow" in out - - -def test_session_to_dataframe(capsys, clients): - from google.cloud.bigquery_storage import types - - bqclient, bqstorageclient = clients - your_project_id = bqclient.project - - # [START bigquerystorage_pandas_tutorial_all] - # [START bigquerystorage_pandas_tutorial_read_session] - project_id = "bigquery-public-data" - dataset_id = "new_york_trees" - table_id = "tree_species" - table = f"projects/{project_id}/datasets/{dataset_id}/tables/{table_id}" - - # Select columns to read with read options. If no read options are - # specified, the whole table is read. 
-    read_options = types.ReadSession.TableReadOptions(
-        selected_fields=["species_common_name", "fall_color"]
-    )
-
-    parent = "projects/{}".format(your_project_id)
-
-    requested_session = types.ReadSession(
-        table=table,
-        # This API can also deliver data serialized in Apache Avro format.
-        # This example leverages Apache Arrow.
-        data_format=types.DataFormat.ARROW,
-        read_options=read_options,
-    )
-    read_session = bqstorageclient.create_read_session(
-        parent=parent, read_session=requested_session, max_stream_count=1,
-    )
-
-    # This example reads from only a single stream. Read from multiple streams
-    # to fetch data faster. Note that the session may not contain any streams
-    # if there are no rows to read.
-    stream = read_session.streams[0]
-    reader = bqstorageclient.read_rows(stream.name)
-
-    # Parse all Arrow blocks and create a dataframe. This call requires a
-    # session, because the session contains the schema for the row blocks.
-    dataframe = reader.to_dataframe(read_session)
-    print(dataframe.head())
-    # [END bigquerystorage_pandas_tutorial_read_session]
-    # [END bigquerystorage_pandas_tutorial_all]
-
-    out, _ = capsys.readouterr()
-    assert "species_common_name" in out

From 6abad5ffd5040c67fa2664cfdfd43894cbf77d00 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 28 Jul 2021 13:21:15 +0200
Subject: [PATCH 104/338] chore(deps): update dependency google-cloud-bigquery
 to v2.23.0 (#253)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 36cdaeda1bd..44e3e40b1c5 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
 google-auth==1.33.1
 google-cloud-bigquery-storage==2.6.0
-google-cloud-bigquery==2.22.1
+google-cloud-bigquery==2.23.0
 pyarrow==4.0.1
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'

From e0bb62197077628b36ac25b619cf54c3416cf365 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 28 Jul 2021 13:21:28 +0200
Subject: [PATCH 105/338] chore(deps): update dependency google-auth to
 v1.34.0 (#252)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 44e3e40b1c5..1435a6fc5d7 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,4 +1,4 @@
-google-auth==1.33.1
+google-auth==1.34.0
 google-cloud-bigquery-storage==2.6.0
 google-cloud-bigquery==2.23.0
 pyarrow==4.0.1

From 8626f7f0ee1e21cead402293388e05af64ee18fb Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 28 Jul 2021 16:47:47 +0200
Subject: [PATCH 106/338] chore(deps): update dependency
 google-cloud-bigquery-storage to v2.6.2 (#244)

---
 bigquery_storage/quickstart/requirements.txt   | 2 +-
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt
index ca0693356fb..204c27295ad 100644
--- a/bigquery_storage/quickstart/requirements.txt
+++ b/bigquery_storage/quickstart/requirements.txt
@@ -1,2 +1,2 @@
 fastavro
-google-cloud-bigquery-storage==2.6.0
+google-cloud-bigquery-storage==2.6.2
diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 1435a6fc5d7..95260d1a9ce 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,5 +1,5 @@
 google-auth==1.34.0
-google-cloud-bigquery-storage==2.6.0
+google-cloud-bigquery-storage==2.6.2
 google-cloud-bigquery==2.23.0
 pyarrow==4.0.1
 ipython==7.24.0; python_version > '3.6'

From 5e4a9f601a3c5cb8eac72b01de6320317f395af8 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 28 Jul 2021 21:46:09 +0200
Subject: [PATCH 107/338] chore(deps): update dependency pyarrow to v5 (#255)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 95260d1a9ce..0e0d4c79be8 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,7 +1,7 @@
 google-auth==1.34.0
 google-cloud-bigquery-storage==2.6.2
 google-cloud-bigquery==2.23.0
-pyarrow==4.0.1
+pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'
 pandas==1.2.5; python_version > '3.6'

From d7d3c0f2c4023a599e095e489ae5d817d4c5462d Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 28 Jul 2021 22:01:48 +0200
Subject: [PATCH 108/338] chore(deps): update dependency google-cloud-bigquery
 to v2.23.1 (#254)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 0e0d4c79be8..c06b7f29c79 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
 google-auth==1.34.0
 google-cloud-bigquery-storage==2.6.2
-google-cloud-bigquery==2.23.0
+google-cloud-bigquery==2.23.1
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'

From faa45238c0381591660a8863a6a04e88f114a965 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Thu, 29 Jul 2021 21:36:55 +0200
Subject: [PATCH 109/338] chore(deps): update dependency google-cloud-bigquery
 to v2.23.2 (#256)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index c06b7f29c79..6a851c9c3ac 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
 google-auth==1.34.0
 google-cloud-bigquery-storage==2.6.2
-google-cloud-bigquery==2.23.1
+google-cloud-bigquery==2.23.2
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'

From 5e30f5b399b772ef8de1fdc61c231d6aad1cd683 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Wed, 11 Aug 2021 16:32:30 +0000
Subject: [PATCH 110/338] chore: fix INSTALL_LIBRARY_FROM_SOURCE in noxfile.py
 (#270)

Source-Link: https://github.com/googleapis/synthtool/commit/6252f2cd074c38f37b44abe5e96d128733eb1b61
Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:50e35228649c47b6ca82aa0be3ff9eb2afce51c82b66c4a03fe4afeb5ff6c0fc
---
 bigquery_storage/quickstart/noxfile.py   | 5 ++++-
 bigquery_storage/to_dataframe/noxfile.py | 5 ++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py
index 9fc7f17820d..7dbea091476 100644
--- a/bigquery_storage/quickstart/noxfile.py
+++ b/bigquery_storage/quickstart/noxfile.py
@@ -94,7 +94,10 @@ def get_pytest_env_vars() -> Dict[str, str]:

 TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])

-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+    "True",
+    "true",
+)
 #
 # Style Checks
 #
diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py
index 9fc7f17820d..7dbea091476 100644
--- a/bigquery_storage/to_dataframe/noxfile.py
+++ b/bigquery_storage/to_dataframe/noxfile.py
@@ -94,7 +94,10 @@ def get_pytest_env_vars() -> Dict[str, str]:

 TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])

-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+    "True",
+    "true",
+)
 #
 # Style Checks
 #

From 3d133c000037d4eb489929590458c175dabd6a9e Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Wed, 11 Aug 2021 12:04:13 -0500
Subject: [PATCH 111/338] chore: remove unused samples (#268)

Follow-up to https://github.com/googleapis/python-bigquery-storage/issues/225

I noticed we didn't need several of these samples now that BQ Storage API is
used by default.
---
 bigquery_storage/to_dataframe/jupyter_test.py | 48 -------------------
 1 file changed, 48 deletions(-)

diff --git a/bigquery_storage/to_dataframe/jupyter_test.py b/bigquery_storage/to_dataframe/jupyter_test.py
index 7997ee2eac1..c2046b8c80e 100644
--- a/bigquery_storage/to_dataframe/jupyter_test.py
+++ b/bigquery_storage/to_dataframe/jupyter_test.py
@@ -49,60 +49,12 @@ def _strip_region_tags(sample_text):
     return "\n".join(magic_lines)


-def test_jupyter_small_query(ipython):
-    ip = IPython.get_ipython()
-    ip.extension_manager.load_extension("google.cloud.bigquery")
-
-    # Include a small query to demonstrate that it falls back to the
-    # tabledata.list API when the BQ Storage API cannot be used.
-    sample = """
-    # [START bigquerystorage_jupyter_tutorial_fallback]
-    %%bigquery stackoverflow --use_bqstorage_api
-    SELECT
-    CONCAT(
-        'https://stackoverflow.com/questions/',
-        CAST(id as STRING)) as url,
-    view_count
-    FROM `bigquery-public-data.stackoverflow.posts_questions`
-    WHERE tags like '%google-bigquery%'
-    ORDER BY view_count DESC
-    LIMIT 10
-    # [END bigquerystorage_jupyter_tutorial_fallback]
-    """
-
-    result = ip.run_cell(_strip_region_tags(sample))
-    result.raise_error()  # Throws an exception if the cell failed.
-    assert "stackoverflow" in ip.user_ns  # verify that variable exists
-
-
 def test_jupyter_tutorial(ipython):
     ip = IPython.get_ipython()
     ip.extension_manager.load_extension("google.cloud.bigquery")

     # This code sample intentionally queries a lot of data to demonstrate the
     # speed-up of using the BigQuery Storage API to download the results.
-    sample = """
-    # [START bigquerystorage_jupyter_tutorial_query]
-    %%bigquery tax_forms --use_bqstorage_api
-    SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012`
-    # [END bigquerystorage_jupyter_tutorial_query]
-    """
-    result = ip.run_cell(_strip_region_tags(sample))
-    result.raise_error()  # Throws an exception if the cell failed.
- - assert "tax_forms" in ip.user_ns # verify that variable exists - tax_forms = ip.user_ns["tax_forms"] - - # [START bigquerystorage_jupyter_tutorial_results] - tax_forms.head() - # [END bigquerystorage_jupyter_tutorial_results] - - # [START bigquerystorage_jupyter_tutorial_context] - import google.cloud.bigquery.magics - - google.cloud.bigquery.magics.context.use_bqstorage_api = True - # [END bigquerystorage_jupyter_tutorial_context] - sample = """ # [START bigquerystorage_jupyter_tutorial_query_default] %%bigquery tax_forms From 4b4125c71023dcada51ac6b064c086b4f24fa199 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Aug 2021 11:51:48 -0400 Subject: [PATCH 112/338] chore: drop mention of Python 2.7 from templates (#272) Source-Link: https://github.com/googleapis/synthtool/commit/facee4cc1ea096cd8bcc008bb85929daa7c414c0 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 6 +++--- bigquery_storage/to_dataframe/noxfile.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 7dbea091476..b008613f03f 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -39,7 +39,7 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": False, @@ -86,8 +86,8 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 7dbea091476..b008613f03f 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -39,7 +39,7 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": False, @@ -86,8 +86,8 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
 IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]

From 9058918d79656b039515be269256e900fcd058df Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Fri, 13 Aug 2021 20:25:40 +0200
Subject: [PATCH 113/338] chore(deps): update dependency google-cloud-bigquery
 to v2.24.0 (#267)

Co-authored-by: Anthonios Partheniou
---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 6a851c9c3ac..82717d0cb63 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
 google-auth==1.34.0
 google-cloud-bigquery-storage==2.6.2
-google-cloud-bigquery==2.23.2
+google-cloud-bigquery==2.24.0
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'

From f4c7f18c85ee130e354666c2685150d6d4f5ac9d Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Fri, 13 Aug 2021 20:51:21 +0200
Subject: [PATCH 114/338] chore(deps): update dependency
 google-cloud-bigquery-storage to v2.6.3 (#266)

Co-authored-by: Anthonios Partheniou
Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com>
---
 bigquery_storage/quickstart/requirements.txt   | 2 +-
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt
index 204c27295ad..a4b9ec9e071 100644
--- a/bigquery_storage/quickstart/requirements.txt
+++ b/bigquery_storage/quickstart/requirements.txt
@@ -1,2 +1,2 @@
 fastavro
-google-cloud-bigquery-storage==2.6.2
+google-cloud-bigquery-storage==2.6.3
diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 82717d0cb63..c21c4da0145 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,5 +1,5 @@
 google-auth==1.34.0
-google-cloud-bigquery-storage==2.6.2
+google-cloud-bigquery-storage==2.6.3
 google-cloud-bigquery==2.24.0
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'

From 1b4c791413d52001385c955e772a1403a12daa6b Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 18 Aug 2021 19:03:40 +0200
Subject: [PATCH 115/338] chore(deps): update dependency google-auth to
 v1.35.0 (#274)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index c21c4da0145..abb4bbe9492 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,4 +1,4 @@
-google-auth==1.34.0
+google-auth==1.35.0
 google-cloud-bigquery-storage==2.6.3
 google-cloud-bigquery==2.24.0
 pyarrow==5.0.0

From 96c82762775c757903e81a399e1a0aec0c495bd9 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Mon, 23 Aug 2021 19:39:58 +0200
Subject: [PATCH 116/338] chore(deps): update dependency google-cloud-bigquery
 to v2.24.1 (#276)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index abb4bbe9492..bcbc1db7a26 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
 google-auth==1.35.0
 google-cloud-bigquery-storage==2.6.3
-google-cloud-bigquery==2.24.0
+google-cloud-bigquery==2.24.1
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'

From c0f1ea29e76d5710440312608efa684302209c45 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Mon, 23 Aug 2021 20:28:50 +0200
Subject: [PATCH 117/338] chore(deps): update dependency google-auth to v2
 (#275)

Co-authored-by: Tim Swast
---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index bcbc1db7a26..298847ea7bd 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,4 +1,4 @@
-google-auth==1.35.0
+google-auth==2.0.1
 google-cloud-bigquery-storage==2.6.3
 google-cloud-bigquery==2.24.1
 pyarrow==5.0.0

From 6534f6e7db7ec35606aa86beff236c1340983011 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 1 Sep 2021 12:55:54 +0200
Subject: [PATCH 118/338] chore(deps): update dependency google-auth to v2.0.2
 (#289)

---
 bigquery_storage/to_dataframe/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index 298847ea7bd..d47ff23deb9 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,4 +1,4 @@
-google-auth==2.0.1
+google-auth==2.0.2
 google-cloud-bigquery-storage==2.6.3
 google-cloud-bigquery==2.24.1
 pyarrow==5.0.0

From 392cb3c8bc01e301b86f6b55da1a0284e795e020 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Wed, 1 Sep 2021 23:02:15 +0200
Subject: [PATCH 119/338] chore(deps): update dependency pytest to v6.2.5
 (#288)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

[![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com)

This PR contains the following updates:

| Package | Change | Age | Adoption | Passing | Confidence |
|---|---|---|---|---|---|
| [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==6.2.4` -> `==6.2.5` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/compatibility-slim/6.2.4)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/confidence-slim/6.2.4)](https://docs.renovatebot.com/merge-confidence/) |

---

### Release Notes
<details>
<summary>pytest-dev/pytest</summary>

### [`v6.2.5`](https://togithub.com/pytest-dev/pytest/releases/6.2.5)

[Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.4...6.2.5)

# pytest 6.2.5 (2021-08-29)

## Trivial/Internal Changes

- [#8494](https://togithub.com/pytest-dev/pytest/issues/8494): Python 3.10 is now supported.
- [#9040](https://togithub.com/pytest-dev/pytest/issues/9040): Enable compatibility with `pluggy 1.0` or later.

</details>
---

### Configuration

📅 **Schedule**: At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied.

♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found.

🔕 **Ignore**: Close this PR and you won't be reminded about this update again.

---

 - [ ] If you want to rebase/retry this PR, check this box.

---

This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage).
---
 bigquery_storage/quickstart/requirements-test.txt   | 2 +-
 bigquery_storage/to_dataframe/requirements-test.txt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt
index 95ea1e6a02b..927094516e6 100644
--- a/bigquery_storage/quickstart/requirements-test.txt
+++ b/bigquery_storage/quickstart/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5
diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt
index 95ea1e6a02b..927094516e6 100644
--- a/bigquery_storage/to_dataframe/requirements-test.txt
+++ b/bigquery_storage/to_dataframe/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5

From e7cd2df7c7b9e0000f876df4b819d9d621a816e5 Mon Sep 17 00:00:00 2001
From: WhiteSource Renovate
Date: Thu, 2 Sep 2021 17:03:37 +0200
Subject: [PATCH 120/338] chore(deps): update all dependencies (#291)

---
 bigquery_storage/quickstart/requirements.txt   | 2 +-
 bigquery_storage/to_dataframe/requirements.txt | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt
index a4b9ec9e071..37ab479593a 100644
--- a/bigquery_storage/quickstart/requirements.txt
+++ b/bigquery_storage/quickstart/requirements.txt
@@ -1,2 +1,2 @@
 fastavro
-google-cloud-bigquery-storage==2.6.3
+google-cloud-bigquery-storage==2.7.0
diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt
index d47ff23deb9..85338b62e83 100644
--- a/bigquery_storage/to_dataframe/requirements.txt
+++ b/bigquery_storage/to_dataframe/requirements.txt
@@ -1,6 +1,6 @@
 google-auth==2.0.2
-google-cloud-bigquery-storage==2.6.3
-google-cloud-bigquery==2.24.1
+google-cloud-bigquery-storage==2.7.0
+google-cloud-bigquery==2.26.0
 pyarrow==5.0.0
 ipython==7.24.0; python_version > '3.6'
 ipython==7.16.1; python_version <= '3.6'

From f3e84e7fc0a6ba8da3bc8a63ec0e7ff2c3b0cb8a Mon Sep 17 00:00:00 2001
From: Tim Swast
Date: Fri, 10 Sep 2021 17:07:20 -0500
Subject: [PATCH 121/338] feat: add `BigQueryWriteClient` where `append_rows`
 returns a helper for writing rows (#284)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* WIP: write client sample
* add sample with nullable types
* add schema for all supported types
* add complex types to code sample
* refactor sample so that it can be tested
* make test assertions more thorough
* fix lint error
* remove done TODO
* address reviewer comments
* fix tag mismatch
* test on multiple regions
* correct comments about why offset exists
* upgrade g-c-b
* WIP: invert stream using BiDi class
* WIP: attempt to use Future for send instead
* WIP: use futures, populated by background consumer
* make sure stream is actually open before returning from open
* copy close implementation from pub/sub
* support extra metadata
* process exceptions, add open timeout
* sort imports
* WIP: unit tests
* drain futures when stream closes
* update docs
* add callbacks to detect when a stream fails
* add unit tests
* add sleep to loop waiting for RPC to be active
* don't freeze if initial RPC fails
* add needed initializations so done() functions
* fail fast when there is a problem with the initial request
* don't inherit concurrent.futures
  It's unnecessary and kept resulting in stuff getting stuck.
* add unit test for open timeout
* 🦉 Updates from OwlBot
  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* add manual client to docs
* typo in sample comments
* force timeout and metadata to be kwargs
* unify interface for sending row data
* pull stream name from merged request
* require newer proto-plus for copy_from method

Co-authored-by: Owl Bot
---
 bigquery_storage/snippets/__init__.py           |  15 +
 .../snippets/append_rows_proto2.py              | 249 +++++++++++
 .../snippets/append_rows_proto2_test.py         | 126 ++++++
 bigquery_storage/snippets/conftest.py           |  60 +++
 bigquery_storage/snippets/noxfile.py            | 266 +++++++++++
 .../snippets/requirements-test.txt              |   2 +
 bigquery_storage/snippets/requirements.txt      |   3 +
 bigquery_storage/snippets/sample_data.proto     |  61 +++
 bigquery_storage/snippets/sample_data_pb2.py    | 418 ++++++++++++++++++
 .../snippets/sample_data_schema.json            |  76 ++++
 .../to_dataframe/requirements.txt               |   1 +
 11 files changed, 1277 insertions(+)
 create mode 100644 bigquery_storage/snippets/__init__.py
 create mode 100644 bigquery_storage/snippets/append_rows_proto2.py
 create mode 100644 bigquery_storage/snippets/append_rows_proto2_test.py
 create mode 100644 bigquery_storage/snippets/conftest.py
 create mode 100644 bigquery_storage/snippets/noxfile.py
 create mode 100644 bigquery_storage/snippets/requirements-test.txt
 create mode 100644 bigquery_storage/snippets/requirements.txt
 create mode 100644 bigquery_storage/snippets/sample_data.proto
 create mode 100644 bigquery_storage/snippets/sample_data_pb2.py
 create mode 100644 bigquery_storage/snippets/sample_data_schema.json

diff --git a/bigquery_storage/snippets/__init__.py b/bigquery_storage/snippets/__init__.py
new file mode 100644
index 00000000000..0098709d195
--- /dev/null
+++ b/bigquery_storage/snippets/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py
new file mode 100644
index 00000000000..d03909937e4
--- /dev/null
+++ b/bigquery_storage/snippets/append_rows_proto2.py
@@ -0,0 +1,249 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [START bigquerystorage_append_rows_raw_proto2]
+"""
+This code sample demonstrates using the low-level generated client for Python.
+"""
+
+import datetime
+import decimal
+
+from google.cloud import bigquery_storage_v1beta2
+from google.cloud.bigquery_storage_v1beta2 import types
+from google.cloud.bigquery_storage_v1beta2 import writer
+from google.protobuf import descriptor_pb2
+
+# If you make updates to the sample_data.proto protocol buffers definition,
+# run:
+#
+#   protoc --python_out=. sample_data.proto
+#
+# from the samples/snippets directory to generate the sample_data_pb2 module.
+from . import sample_data_pb2
+
+
+def append_rows_proto2(project_id: str, dataset_id: str, table_id: str):
+    """Create a write stream, write some sample data, and commit the stream."""
+    write_client = bigquery_storage_v1beta2.BigQueryWriteClient()
+    parent = write_client.table_path(project_id, dataset_id, table_id)
+    write_stream = types.WriteStream()
+
+    # When creating the stream, choose the type. Use the PENDING type to wait
+    # until the stream is committed before it is visible. See:
+    # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1beta2#google.cloud.bigquery.storage.v1beta2.WriteStream.Type
+    write_stream.type_ = types.WriteStream.Type.PENDING
+    write_stream = write_client.create_write_stream(
+        parent=parent, write_stream=write_stream
+    )
+    stream_name = write_stream.name
+
+    # Create a template with fields needed for the first request.
+    request_template = types.AppendRowsRequest()
+
+    # The initial request must contain the stream name.
+    request_template.write_stream = stream_name
+
+    # So that BigQuery knows how to parse the serialized_rows, generate a
+    # protocol buffer representation of your message descriptor.
+    proto_schema = types.ProtoSchema()
+    proto_descriptor = descriptor_pb2.DescriptorProto()
+    sample_data_pb2.SampleData.DESCRIPTOR.CopyToProto(proto_descriptor)
+    proto_schema.proto_descriptor = proto_descriptor
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.writer_schema = proto_schema
+    request_template.proto_rows = proto_data
+
+    # Some stream types support an unbounded number of requests. Construct an
+    # AppendRowsStream to send an arbitrary number of requests to a stream.
+    append_rows_stream = writer.AppendRowsStream(write_client, request_template)
+
+    # Create a batch of row data by appending proto2 serialized bytes to the
+    # serialized_rows repeated field.
+    proto_rows = types.ProtoRows()
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 1
+    row.bool_col = True
+    row.bytes_col = b"Hello, World!"
+    row.float64_col = float("+inf")
+    row.int64_col = 123
+    row.string_col = "Howdy!"
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 2
+    row.bool_col = False
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 3
+    row.bytes_col = b"See you later!"
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 4
+    row.float64_col = 1000000.125
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 5
+    row.int64_col = 67000
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 6
+    row.string_col = "Auf Wiedersehen!"
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    # Set an offset to allow resuming this stream if the connection breaks.
+    # Keep track of which requests the server has acknowledged and resume the
+    # stream at the first non-acknowledged message. If the server has already
+    # processed a message with that offset, it will return an ALREADY_EXISTS
+    # error, which can be safely ignored.
+    #
+    # The first request must always have an offset of 0.
+    request = types.AppendRowsRequest()
+    request.offset = 0
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.rows = proto_rows
+    request.proto_rows = proto_data
+
+    response_future_1 = append_rows_stream.send(request)
+
+    # Create a batch of rows containing scalar values that don't directly
+    # correspond to a protocol buffers scalar type. See the documentation for
+    # the expected data formats:
+    # https://cloud.google.com/bigquery/docs/write-api#data_type_conversions
+    proto_rows = types.ProtoRows()
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 7
+    date_value = datetime.date(2021, 8, 12)
+    epoch_value = datetime.date(1970, 1, 1)
+    delta = date_value - epoch_value
+    row.date_col = delta.days
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 8
+    datetime_value = datetime.datetime(2021, 8, 12, 9, 46, 23, 987456)
+    row.datetime_col = datetime_value.strftime("%Y-%m-%d %H:%M:%S.%f")
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 9
+    row.geography_col = "POINT(-122.347222 47.651111)"
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 10
+    numeric_value = decimal.Decimal("1.23456789101112e+6")
+    row.numeric_col = str(numeric_value)
+    bignumeric_value = decimal.Decimal("-1.234567891011121314151617181920e+16")
+    row.bignumeric_col = str(bignumeric_value)
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 11
+    time_value = datetime.time(11, 7, 48, 123456)
+    row.time_col = time_value.strftime("%H:%M:%S.%f")
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 12
+    timestamp_value = datetime.datetime(
+        2021, 8, 12, 16, 11, 22, 987654, tzinfo=datetime.timezone.utc
+    )
+    epoch_value = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+    delta = timestamp_value - epoch_value
+    row.timestamp_col = int(delta.total_seconds()) * 1000000 + int(delta.microseconds)
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    # Since this is the second request, you only need to include the row data.
+    # The name of the stream and protocol buffers DESCRIPTOR is only needed in
+    # the first request.
+    request = types.AppendRowsRequest()
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.rows = proto_rows
+    request.proto_rows = proto_data
+
+    # Offset must equal the number of rows that were previously sent.
+    request.offset = 6
+
+    response_future_2 = append_rows_stream.send(request)
+
+    # Create a batch of rows with STRUCT and ARRAY BigQuery data types. In
+    # protocol buffers, these correspond to nested messages and repeated
+    # fields, respectively.
+    proto_rows = types.ProtoRows()
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 13
+    row.int64_list.append(1)
+    row.int64_list.append(2)
+    row.int64_list.append(3)
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 14
+    row.struct_col.sub_int_col = 7
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    row = sample_data_pb2.SampleData()
+    row.row_num = 15
+    sub_message = sample_data_pb2.SampleData.SampleStruct()
+    sub_message.sub_int_col = -1
+    row.struct_list.append(sub_message)
+    sub_message = sample_data_pb2.SampleData.SampleStruct()
+    sub_message.sub_int_col = -2
+    row.struct_list.append(sub_message)
+    sub_message = sample_data_pb2.SampleData.SampleStruct()
+    sub_message.sub_int_col = -3
+    row.struct_list.append(sub_message)
+    proto_rows.serialized_rows.append(row.SerializeToString())
+
+    request = types.AppendRowsRequest()
+    request.offset = 12
+    proto_data = types.AppendRowsRequest.ProtoData()
+    proto_data.rows = proto_rows
+    request.proto_rows = proto_data
+
+    # For each request sent, a message is expected in the responses iterable.
+    # This sample sends 3 requests, therefore expect exactly 3 responses.
+    response_future_3 = append_rows_stream.send(request)
+
+    # All three requests are in-flight, wait for them to finish being processed
+    # before finalizing the stream.
+    print(response_future_1.result())
+    print(response_future_2.result())
+    print(response_future_3.result())
+
+    # Shutdown background threads and close the streaming connection.
+    append_rows_stream.close()
+
+    # A PENDING type stream must be "finalized" before being committed. No new
+    # records can be written to the stream after this method has been called.
+    write_client.finalize_write_stream(name=write_stream.name)
+
+    # Commit the stream you created earlier.
+    batch_commit_write_streams_request = types.BatchCommitWriteStreamsRequest()
+    batch_commit_write_streams_request.parent = parent
+    batch_commit_write_streams_request.write_streams = [write_stream.name]
+    write_client.batch_commit_write_streams(batch_commit_write_streams_request)
+
+    print(f"Writes to stream: '{write_stream.name}' have been committed.")
+
+
+# [END bigquerystorage_append_rows_raw_proto2]
diff --git a/bigquery_storage/snippets/append_rows_proto2_test.py b/bigquery_storage/snippets/append_rows_proto2_test.py
new file mode 100644
index 00000000000..dddda30129e
--- /dev/null
+++ b/bigquery_storage/snippets/append_rows_proto2_test.py
@@ -0,0 +1,126 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import decimal
+import pathlib
+import random
+
+from google.cloud import bigquery
+import pytest
+
+from . import append_rows_proto2
+
+
+DIR = pathlib.Path(__file__).parent
+
+
+regions = ["US", "non-US"]
+
+
+@pytest.fixture(params=regions)
+def sample_data_table(
+    request: pytest.FixtureRequest,
+    bigquery_client: bigquery.Client,
+    project_id: str,
+    dataset_id: str,
+    dataset_id_non_us: str,
+) -> str:
+    dataset = dataset_id
+    if request.param != "US":
+        dataset = dataset_id_non_us
+    schema = bigquery_client.schema_from_json(str(DIR / "sample_data_schema.json"))
+    table_id = f"append_rows_proto2_{random.randrange(10000)}"
+    full_table_id = f"{project_id}.{dataset}.{table_id}"
+    table = bigquery.Table(full_table_id, schema=schema)
+    table = bigquery_client.create_table(table, exists_ok=True)
+    yield full_table_id
+    bigquery_client.delete_table(table, not_found_ok=True)
+
+
+def test_append_rows_proto2(
+    capsys: pytest.CaptureFixture,
+    bigquery_client: bigquery.Client,
+    sample_data_table: str,
+):
+    project_id, dataset_id, table_id = sample_data_table.split(".")
+    append_rows_proto2.append_rows_proto2(
+        project_id=project_id, dataset_id=dataset_id, table_id=table_id
+    )
+    out, _ = capsys.readouterr()
+    assert "have been committed" in out
+
+    rows = bigquery_client.query(
+        f"SELECT * FROM `{project_id}.{dataset_id}.{table_id}`"
+    ).result()
+    row_items = [
+        # Convert to sorted tuple of items, omitting NULL values, to make
+        # searching for expected rows easier.
+        tuple(
+            sorted(
+                item for item in row.items() if item[1] is not None and item[1] != []
+            )
+        )
+        for row in rows
+    ]
+
+    assert (
+        ("bool_col", True),
+        ("bytes_col", b"Hello, World!"),
+        ("float64_col", float("+inf")),
+        ("int64_col", 123),
+        ("row_num", 1),
+        ("string_col", "Howdy!"),
+    ) in row_items
+    assert (("bool_col", False), ("row_num", 2)) in row_items
+    assert (("bytes_col", b"See you later!"), ("row_num", 3)) in row_items
+    assert (("float64_col", 1000000.125), ("row_num", 4)) in row_items
+    assert (("int64_col", 67000), ("row_num", 5)) in row_items
+    assert (("row_num", 6), ("string_col", "Auf Wiedersehen!")) in row_items
+    assert (("date_col", datetime.date(2021, 8, 12)), ("row_num", 7)) in row_items
+    assert (
+        ("datetime_col", datetime.datetime(2021, 8, 12, 9, 46, 23, 987456)),
+        ("row_num", 8),
+    ) in row_items
+    assert (
+        ("geography_col", "POINT(-122.347222 47.651111)"),
+        ("row_num", 9),
+    ) in row_items
+    assert (
+        ("bignumeric_col", decimal.Decimal("-1.234567891011121314151617181920e+16")),
+        ("numeric_col", decimal.Decimal("1.23456789101112e+6")),
+        ("row_num", 10),
+    ) in row_items
+    assert (
+        ("row_num", 11),
+        ("time_col", datetime.time(11, 7, 48, 123456)),
+    ) in row_items
+    assert (
+        ("row_num", 12),
+        (
+            "timestamp_col",
+            datetime.datetime(
+                2021, 8, 12, 16, 11, 22, 987654, tzinfo=datetime.timezone.utc
+            ),
+        ),
+    ) in row_items
+    assert (("int64_list", [1, 2, 3]), ("row_num", 13)) in row_items
+    assert (("row_num", 14), ("struct_col", {"sub_int_col": 7}),) in row_items
+    assert (
+        ("row_num", 15),
+        (
+            "struct_list",
+            [{"sub_int_col": -1}, {"sub_int_col": -2}, {"sub_int_col": -3}],
+        ),
+    ) in row_items
diff --git a/bigquery_storage/snippets/conftest.py b/bigquery_storage/snippets/conftest.py
new file mode 100644
index 00000000000..531f0b9dc7e
--- /dev/null
+++ b/bigquery_storage/snippets/conftest.py
@@ -0,0 +1,60 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud import bigquery
+import pytest
+import test_utils.prefixer
+
+
+prefixer = test_utils.prefixer.Prefixer("python-bigquery-storage", "samples/snippets")
+
+
+@pytest.fixture(scope="session", autouse=True)
+def cleanup_datasets(bigquery_client: bigquery.Client):
+    for dataset in bigquery_client.list_datasets():
+        if prefixer.should_cleanup(dataset.dataset_id):
+            bigquery_client.delete_dataset(
+                dataset, delete_contents=True, not_found_ok=True
+            )
+
+
+@pytest.fixture(scope="session")
+def bigquery_client():
+    return bigquery.Client()
+
+
+@pytest.fixture(scope="session")
+def project_id(bigquery_client):
+    return bigquery_client.project
+
+
+@pytest.fixture(scope="session")
+def dataset_id(bigquery_client: bigquery.Client, project_id: str):
+    dataset_id = prefixer.create_prefix()
+    full_dataset_id = f"{project_id}.{dataset_id}"
+    dataset = bigquery.Dataset(full_dataset_id)
+    bigquery_client.create_dataset(dataset)
+    yield dataset_id
+    bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
+
+
+@pytest.fixture(scope="session")
+def dataset_id_non_us(bigquery_client: bigquery.Client, project_id: str):
+    dataset_id = prefixer.create_prefix()
+    full_dataset_id = f"{project_id}.{dataset_id}"
+    dataset = bigquery.Dataset(full_dataset_id)
+    dataset.location = "asia-northeast1"
+    bigquery_client.create_dataset(dataset)
+    yield dataset_id
+    bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py
new file mode 100644
index 00000000000..b008613f03f
--- /dev/null
+++ b/bigquery_storage/snippets/noxfile.py
@@ -0,0 +1,266 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+from pathlib import Path
+import sys
+from typing import Callable, Dict, List, Optional
+
+import nox
+
+
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+#           DO NOT EDIT THIS FILE EVER!
+# WARNING - WARNING - WARNING - WARNING - WARNING
+# WARNING - WARNING - WARNING - WARNING - WARNING
+
+BLACK_VERSION = "black==19.10b0"
+
+# Copy `noxfile_config.py` to your directory and modify it instead.
+
+# `TEST_CONFIG` dict is a configuration hook that allows users to
+# modify the test configurations. The values here should be in sync
+# with `noxfile_config.py`. Users will copy `noxfile_config.py` into
+# their directory and modify it.
+
+TEST_CONFIG = {
+    # You can opt out from the test for specific Python versions.
+ "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. 
+#
+# We ignore the following rules:
+# E203: whitespace before ‘:’
+# E266: too many leading ‘#’ for block comment
+# E501: line too long
+# I202: Additional newline in a section of imports
+#
+# We also need to specify the rules which are ignored by default:
+# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121']
+FLAKE8_COMMON_ARGS = [
+    "--show-source",
+    "--builtin=gettext",
+    "--max-complexity=20",
+    "--import-order-style=google",
+    "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
+    "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
+    "--max-line-length=88",
+]
+
+
+@nox.session
+def lint(session: nox.sessions.Session) -> None:
+    if not TEST_CONFIG["enforce_type_hints"]:
+        session.install("flake8", "flake8-import-order")
+    else:
+        session.install("flake8", "flake8-import-order", "flake8-annotations")
+
+    local_names = _determine_local_import_names(".")
+    args = FLAKE8_COMMON_ARGS + [
+        "--application-import-names",
+        ",".join(local_names),
+        ".",
+    ]
+    session.run("flake8", *args)
+
+
+#
+# Black
+#
+
+
+@nox.session
+def blacken(session: nox.sessions.Session) -> None:
+    session.install(BLACK_VERSION)
+    python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+    session.run("black", *python_files)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(
+    session: nox.sessions.Session, post_install: Callable = None
+) -> None:
+    if TEST_CONFIG["pip_version_override"]:
+        pip_version = TEST_CONFIG["pip_version_override"]
+        session.install(f"pip=={pip_version}")
+    """Runs py.test for a particular project."""
+    if os.path.exists("requirements.txt"):
+        if os.path.exists("constraints.txt"):
+            session.install("-r", "requirements.txt", "-c", "constraints.txt")
+        else:
+            session.install("-r", "requirements.txt")
+
+    if os.path.exists("requirements-test.txt"):
+        if os.path.exists("constraints-test.txt"):
+            session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
+        else:
+            session.install("-r", "requirements-test.txt")
+
+    if INSTALL_LIBRARY_FROM_SOURCE:
+        session.install("-e", _get_repo_root())
+
+    if post_install:
+        post_install(session)
+
+    session.run(
+        "pytest",
+        *(PYTEST_COMMON_ARGS + session.posargs),
+        # Pytest will return 5 when no tests are collected. This can happen
+        # on travis where slow and flaky tests are excluded.
+        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+        success_codes=[0, 5],
+        env=get_pytest_env_vars(),
+    )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session: nox.sessions.Session) -> None:
+    """Runs py.test for a sample using the specified version of Python."""
+    if session.python in TESTED_VERSIONS:
+        _session_tests(session)
+    else:
+        session.skip(
+            "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+        )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root() -> Optional[str]:
+    """ Returns the root folder of the project. """
+    # Get root of this repository. Assume we don't have directories nested deeper than 10 items.
+    p = Path(os.getcwd())
+    for i in range(10):
+        if p is None:
+            break
+        if Path(p / ".git").exists():
+            return str(p)
+        # .git is not available in repos cloned via Cloud Build
+        # setup.py is always in the library's root, so use that instead
+        # https://github.com/googleapis/synthtool/issues/792
+        if Path(p / "setup.py").exists():
+            return str(p)
+        p = p.parent
+    raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session: nox.sessions.Session, path: str) -> None:
+    """(Re-)generates the readme for a sample."""
+    session.install("jinja2", "pyyaml")
+    dir_ = os.path.dirname(path)
+
+    if os.path.exists(os.path.join(dir_, "requirements.txt")):
+        session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+    in_file = os.path.join(dir_, "README.rst.in")
+    session.run(
+        "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+    )
diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt
new file mode 100644
index 00000000000..85597665d21
--- /dev/null
+++ b/bigquery_storage/snippets/requirements-test.txt
@@ -0,0 +1,2 @@
+google-cloud-testutils==1.0.0
+pytest==6.2.4
diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt
new file mode 100644
index 00000000000..8df538bbcc2
--- /dev/null
+++ b/bigquery_storage/snippets/requirements.txt
@@ -0,0 +1,3 @@
+google-cloud-bigquery-storage==2.6.2
+google-cloud-bigquery==2.24.1
+protobuf==3.17.3
diff --git a/bigquery_storage/snippets/sample_data.proto b/bigquery_storage/snippets/sample_data.proto
new file mode 100644
index 00000000000..3e9f19cefff
--- /dev/null
+++ b/bigquery_storage/snippets/sample_data.proto
@@ -0,0 +1,61 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START bigquerystorage_append_rows_raw_proto2_definition]
+// The BigQuery Storage API expects protocol buffer data to be encoded in the
+// proto2 wire format. This allows it to disambiguate missing optional fields
+// from default values without the need for wrapper types.
+syntax = "proto2";
+
+// Define a message type representing the rows in your table. The message
+// cannot contain fields which are not present in the table.
+message SampleData {
+  // Use a nested message to encode STRUCT column values.
+  //
+  // References to external messages are not allowed. Any message definitions
+  // must be nested within the root message representing row data.
+  message SampleStruct {
+    optional int64 sub_int_col = 1;
+  }
+
+  // The following types map directly between protocol buffers and their
+  // corresponding BigQuery data types.
+  optional bool bool_col = 1;
+  optional bytes bytes_col = 2;
+  optional double float64_col = 3;
+  optional int64 int64_col = 4;
+  optional string string_col = 5;
+
+  // The following data types require some encoding to use. See the
+  // documentation for the expected data formats:
+  // https://cloud.google.com/bigquery/docs/write-api#data_type_conversion
+  optional int32 date_col = 6;
+  optional string datetime_col = 7;
+  optional string geography_col = 8;
+  optional string numeric_col = 9;
+  optional string bignumeric_col = 10;
+  optional string time_col = 11;
+  optional int64 timestamp_col = 12;
+
+  // Use a repeated field to represent a BigQuery ARRAY value.
+  repeated int64 int64_list = 13;
+
+  // Use a nested message to encode STRUCT and ARRAY values.
+  optional SampleStruct struct_col = 14;
+  repeated SampleStruct struct_list = 15;
+
+  // Use the required keyword for client-side validation of required fields.
+  required int64 row_num = 16;
+}
+// [END bigquerystorage_append_rows_raw_proto2_definition]
diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py
new file mode 100644
index 00000000000..ba524988a36
--- /dev/null
+++ b/bigquery_storage/snippets/sample_data_pb2.py
@@ -0,0 +1,418 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: sample_data.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+    name="sample_data.proto",
+    package="",
+    syntax="proto2",
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+    serialized_pb=b'\n\x11sample_data.proto"\xa9\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12\x0f\n\x07row_num\x18\x10 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03',
+)
+
+
+_SAMPLEDATA_SAMPLESTRUCT = _descriptor.Descriptor(
+    name="SampleStruct",
+    full_name="SampleData.SampleStruct",
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    create_key=_descriptor._internal_create_key,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name="sub_int_col",
+            full_name="SampleData.SampleStruct.sub_int_col",
+            index=0,
+            number=1,
+            type=3,
+            cpp_type=2,
+            label=1,
+            has_default_value=False,
+            default_value=0,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+    ],
+    extensions=[],
+    nested_types=[],
+    enum_types=[],
+    serialized_options=None,
+    is_extendable=False,
+    syntax="proto2",
+    extension_ranges=[],
+    oneofs=[],
+    serialized_start=412,
+    serialized_end=447,
+)
+
+_SAMPLEDATA = _descriptor.Descriptor(
+    name="SampleData",
+    full_name="SampleData",
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    create_key=_descriptor._internal_create_key,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name="bool_col",
+            full_name="SampleData.bool_col",
+            index=0,
+            number=1,
+            type=8,
+            cpp_type=7,
+            label=1,
+            has_default_value=False,
+            default_value=False,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="bytes_col",
+            full_name="SampleData.bytes_col",
+            index=1,
+            number=2,
+            type=12,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"",
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="float64_col",
+            full_name="SampleData.float64_col",
+            index=2,
+            number=3,
+            type=1,
+            cpp_type=5,
+            label=1,
+            has_default_value=False,
+            default_value=float(0),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="int64_col",
+            full_name="SampleData.int64_col",
+            index=3,
+            number=4,
+            type=3,
+            cpp_type=2,
+            label=1,
+            has_default_value=False,
+            default_value=0,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="string_col",
+            full_name="SampleData.string_col",
+            index=4,
+            number=5,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"".decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="date_col",
+            full_name="SampleData.date_col",
+            index=5,
+            number=6,
+            type=5,
+            cpp_type=1,
+            label=1,
+            has_default_value=False,
+            default_value=0,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="datetime_col",
+            full_name="SampleData.datetime_col",
+            index=6,
+            number=7,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"".decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="geography_col",
+            full_name="SampleData.geography_col",
+            index=7,
+            number=8,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"".decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="numeric_col",
+            full_name="SampleData.numeric_col",
+            index=8,
+            number=9,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"".decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="bignumeric_col",
+            full_name="SampleData.bignumeric_col",
+            index=9,
+            number=10,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"".decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="time_col",
+            full_name="SampleData.time_col",
+            index=10,
+            number=11,
+            type=9,
+            cpp_type=9,
+            label=1,
+            has_default_value=False,
+            default_value=b"".decode("utf-8"),
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="timestamp_col",
+            full_name="SampleData.timestamp_col",
+            index=11,
+            number=12,
+            type=3,
+            cpp_type=2,
+            label=1,
+            has_default_value=False,
+            default_value=0,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="int64_list",
+            full_name="SampleData.int64_list",
+            index=12,
+            number=13,
+            type=3,
+            cpp_type=2,
+            label=3,
+            has_default_value=False,
+            default_value=[],
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="struct_col",
+            full_name="SampleData.struct_col",
+            index=13,
+            number=14,
+            type=11,
+            cpp_type=10,
+            label=1,
+            has_default_value=False,
+            default_value=None,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="struct_list",
+            full_name="SampleData.struct_list",
+            index=14,
+            number=15,
+            type=11,
+            cpp_type=10,
+            label=3,
+            has_default_value=False,
+            default_value=[],
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+        _descriptor.FieldDescriptor(
+            name="row_num",
+            full_name="SampleData.row_num",
+            index=15,
+            number=16,
+            type=3,
+            cpp_type=2,
+            label=2,
+            has_default_value=False,
+            default_value=0,
+            message_type=None,
+            enum_type=None,
+            containing_type=None,
+            is_extension=False,
+            extension_scope=None,
+            serialized_options=None,
+            file=DESCRIPTOR,
+            create_key=_descriptor._internal_create_key,
+        ),
+    ],
+    extensions=[],
+    nested_types=[_SAMPLEDATA_SAMPLESTRUCT,],
+    enum_types=[],
+    serialized_options=None,
+    is_extendable=False,
+    syntax="proto2",
+    extension_ranges=[],
+    oneofs=[],
+    serialized_start=22,
+    serialized_end=447,
+)
+
+_SAMPLEDATA_SAMPLESTRUCT.containing_type = _SAMPLEDATA
+_SAMPLEDATA.fields_by_name["struct_col"].message_type = _SAMPLEDATA_SAMPLESTRUCT
+_SAMPLEDATA.fields_by_name["struct_list"].message_type = _SAMPLEDATA_SAMPLESTRUCT
+DESCRIPTOR.message_types_by_name["SampleData"] = _SAMPLEDATA +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +SampleData = _reflection.GeneratedProtocolMessageType( + "SampleData", + (_message.Message,), + { + "SampleStruct": _reflection.GeneratedProtocolMessageType( + "SampleStruct", + (_message.Message,), + { + "DESCRIPTOR": _SAMPLEDATA_SAMPLESTRUCT, + "__module__": "sample_data_pb2" + # @@protoc_insertion_point(class_scope:SampleData.SampleStruct) + }, + ), + "DESCRIPTOR": _SAMPLEDATA, + "__module__": "sample_data_pb2" + # @@protoc_insertion_point(class_scope:SampleData) + }, +) +_sym_db.RegisterMessage(SampleData) +_sym_db.RegisterMessage(SampleData.SampleStruct) + + +# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/snippets/sample_data_schema.json b/bigquery_storage/snippets/sample_data_schema.json new file mode 100644 index 00000000000..ba6ba102261 --- /dev/null +++ b/bigquery_storage/snippets/sample_data_schema.json @@ -0,0 +1,76 @@ + +[ + { + "name": "bool_col", + "type": "BOOLEAN" + }, + { + "name": "bytes_col", + "type": "BYTES" + }, + { + "name": "date_col", + "type": "DATE" + }, + { + "name": "datetime_col", + "type": "DATETIME" + }, + { + "name": "float64_col", + "type": "FLOAT" + }, + { + "name": "geography_col", + "type": "GEOGRAPHY" + }, + { + "name": "int64_col", + "type": "INTEGER" + }, + { + "name": "numeric_col", + "type": "NUMERIC" + }, + { + "name": "bignumeric_col", + "type": "BIGNUMERIC" + }, + { + "name": "row_num", + "type": "INTEGER", + "mode": "REQUIRED" + }, + { + "name": "string_col", + "type": "STRING" + }, + { + "name": "time_col", + "type": "TIME" + }, + { + "name": "timestamp_col", + "type": "TIMESTAMP" + }, + { + "name": "int64_list", + "type": "INTEGER", + "mode": "REPEATED" + }, + { + "name": "struct_col", + "type": "RECORD", + "fields": [ + {"name": "sub_int_col", "type": "INTEGER"} + ] + }, + { + "name": "struct_list", + "type": "RECORD", + "fields": [ + {"name": "sub_int_col", "type": "INTEGER"} + ], + "mode": "REPEATED" + } + ] diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 85338b62e83..063a02b258a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -6,3 +6,4 @@ ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' pandas==1.2.5; python_version > '3.6' pandas==1.1.5; python_version <= '3.6' +tqdm==4.62.1 From 97ed04b220a8d61099608953d947877ae61bd482 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 27 Sep 2021 12:10:21 -0500 Subject: [PATCH 122/338] feat: add `AppendRowsStream` to use write API from v1 endpoint (#309) This is just a duplicate of the class in the v1beta2 endpoint. I see for reads we tried to be clever by using the v1 version from the v1beta2 endpoint, but it would be harder to do with the write API. The `initial_request_template` parameter means that we need to make sure for certain that we are using the generated types for the correct endpoint. Since "beta" is clearly in the endpoint and import name, I think leaving the v1beta2 writer module as-is, with additional features and fixes only added to v1 makes some sense. Alternatively, we could add some tests to ensure these classes stay in sync? 
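For reference, a minimal sketch of the v1 usage this enables. It is not part of the patch itself: the project, dataset, and table names are placeholders, and it mirrors the PENDING-stream flow used by the snippets in this repository.

```python
# Hypothetical sketch, not shipped in this patch; identifiers below are
# placeholders. Everything is imported from the v1 endpoint so the types in
# the request template match the client, per the note above.
from google.cloud import bigquery_storage_v1
from google.cloud.bigquery_storage_v1 import types, writer

write_client = bigquery_storage_v1.BigQueryWriteClient()
parent = write_client.table_path("my-project", "my_dataset", "my_table")

# A PENDING stream keeps rows invisible until the stream is committed.
write_stream = types.WriteStream()
write_stream.type_ = types.WriteStream.Type.PENDING
write_stream = write_client.create_write_stream(
    parent=parent, write_stream=write_stream
)

# The template supplies whatever the first request must carry, at minimum the
# stream name; a real program also attaches the proto writer schema here.
request_template = types.AppendRowsRequest()
request_template.write_stream = write_stream.name

append_rows_stream = writer.AppendRowsStream(write_client, request_template)
```

Each `send()` on the resulting stream returns a future whose `result()` raises if the append failed, which is how the snippets in this repository check for append errors.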
--- bigquery_storage/snippets/append_rows_proto2.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py index d03909937e4..a06c29d70de 100644 --- a/bigquery_storage/snippets/append_rows_proto2.py +++ b/bigquery_storage/snippets/append_rows_proto2.py @@ -20,9 +20,9 @@ import datetime import decimal -from google.cloud import bigquery_storage_v1beta2 -from google.cloud.bigquery_storage_v1beta2 import types -from google.cloud.bigquery_storage_v1beta2 import writer +from google.cloud import bigquery_storage_v1 +from google.cloud.bigquery_storage_v1 import types +from google.cloud.bigquery_storage_v1 import writer from google.protobuf import descriptor_pb2 # If you make updates to the sample_data.proto protocol buffers definition, @@ -36,13 +36,13 @@ def append_rows_proto2(project_id: str, dataset_id: str, table_id: str): """Create a write stream, write some sample data, and commit the stream.""" - write_client = bigquery_storage_v1beta2.BigQueryWriteClient() + write_client = bigquery_storage_v1.BigQueryWriteClient() parent = write_client.table_path(project_id, dataset_id, table_id) write_stream = types.WriteStream() # When creating the stream, choose the type. Use the PENDING type to wait # until the stream is committed before it is visible. See: - # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1beta2#google.cloud.bigquery.storage.v1beta2.WriteStream.Type + # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#google.cloud.bigquery.storage.v1.WriteStream.Type write_stream.type_ = types.WriteStream.Type.PENDING write_stream = write_client.create_write_stream( parent=parent, write_stream=write_stream From 75bb82a6c5b1c328a80216ab5fe0b63d44e22c42 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 27 Sep 2021 19:58:15 +0200 Subject: [PATCH 123/338] chore(deps): update all dependencies (#298) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | `==2.0.2` -> `==2.2.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-auth/2.2.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-auth/2.2.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-auth/2.2.0/compatibility-slim/2.0.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-auth/2.2.0/confidence-slim/2.0.2)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.26.0` -> `==2.27.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/compatibility-slim/2.26.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/confidence-slim/2.26.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.24.1` -> `==2.27.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/compatibility-slim/2.24.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/confidence-slim/2.24.1)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-bigquery-storage](https://togithub.com/googleapis/python-bigquery-storage) | `==2.7.0` -> `==2.8.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/compatibility-slim/2.7.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/confidence-slim/2.7.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-bigquery-storage](https://togithub.com/googleapis/python-bigquery-storage) | `==2.6.2` -> `==2.8.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/compatibility-slim/2.6.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery-storage/2.8.0/confidence-slim/2.6.2)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-testutils](https://togithub.com/googleapis/python-test-utils) | `==1.0.0` -> `==1.1.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.1.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.1.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.1.0/compatibility-slim/1.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-testutils/1.1.0/confidence-slim/1.0.0)](https://docs.renovatebot.com/merge-confidence/) | | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.17.3` -> `==3.18.0` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.18.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.18.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.18.0/compatibility-slim/3.17.3)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.18.0/confidence-slim/3.17.3)](https://docs.renovatebot.com/merge-confidence/) | | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==6.2.4` -> `==6.2.5` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/compatibility-slim/6.2.4)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/6.2.5/confidence-slim/6.2.4)](https://docs.renovatebot.com/merge-confidence/) | | [tqdm](https://tqdm.github.io) ([source](https://togithub.com/tqdm/tqdm), [changelog](https://tqdm.github.io/releases)) | `==4.62.1` -> `==4.62.3` | [![age](https://badges.renovateapi.com/packages/pypi/tqdm/4.62.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/tqdm/4.62.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/tqdm/4.62.3/compatibility-slim/4.62.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/tqdm/4.62.3/confidence-slim/4.62.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/google-auth-library-python ### [`v2.2.0`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​220-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev210v220-2021-09-21) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.1.0...v2.2.0) ##### Features - add support for workforce pool credentials ([#​868](https://www.togithub.com/googleapis/google-auth-library-python/issues/868)) ([993bab2](https://www.github.com/googleapis/google-auth-library-python/commit/993bab2aaacf3034e09d9f0f25d36c0e815d3a29)) ### [`v2.1.0`](https://togithub.com/googleapis/google-auth-library-python/blob/master/CHANGELOG.md#​210-httpswwwgithubcomgoogleapisgoogle-auth-library-pythoncomparev202v210-2021-09-10) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.0.2...v2.1.0) ##### Features - Improve handling of clock skew ([#​858](https://www.togithub.com/googleapis/google-auth-library-python/issues/858)) ([45c4491](https://www.github.com/googleapis/google-auth-library-python/commit/45c4491fb971c9edf590b27b9e271b7a23a1bba6)) ##### Bug Fixes - add SAML challenge to reauth ([#​819](https://www.togithub.com/googleapis/google-auth-library-python/issues/819)) ([13aed5f](https://www.github.com/googleapis/google-auth-library-python/commit/13aed5ffe3ba435004ab48202462452f04d7cb29)) - disable warning if quota project id provided to auth.default() ([#​856](https://www.togithub.com/googleapis/google-auth-library-python/issues/856)) ([11ebaeb](https://www.github.com/googleapis/google-auth-library-python/commit/11ebaeb9d7c0862916154cfb810238574507629a)) - rename CLOCK_SKEW and separate client/server user case ([#​863](https://www.togithub.com/googleapis/google-auth-library-python/issues/863)) ([738611b](https://www.github.com/googleapis/google-auth-library-python/commit/738611bd2914f0fd5fa8b49b65f56ef321829c85)) ##### [2.0.2](https://www.github.com/googleapis/google-auth-library-python/compare/v2.0.1...v2.0.2) (2021-08-25) ##### Bug Fixes - use 'int.to_bytes' rather than deprecated crypto wrapper ([#​848](https://www.togithub.com/googleapis/google-auth-library-python/issues/848)) ([b79b554](https://www.github.com/googleapis/google-auth-library-python/commit/b79b55407b31933c9a8fe6de01478fa00a33fa2b)) - use int.from_bytes ([#​846](https://www.togithub.com/googleapis/google-auth-library-python/issues/846)) ([466aed9](https://www.github.com/googleapis/google-auth-library-python/commit/466aed99f5c2ba15d2036fa21cc83b3f0fc22639)) ##### [2.0.1](https://www.github.com/googleapis/google-auth-library-python/compare/v2.0.0...v2.0.1) (2021-08-17) ##### Bug Fixes - normalize AWS paths correctly on windows ([#​842](https://www.togithub.com/googleapis/google-auth-library-python/issues/842)) ([4e0fb1c](https://www.github.com/googleapis/google-auth-library-python/commit/4e0fb1cee78ee56b878b6e12be3b3c58df242b05))
googleapis/python-bigquery ### [`v2.27.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2270-httpswwwgithubcomgoogleapispython-bigquerycomparev2260v2270-2021-09-24) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.26.0...v2.27.0) ##### Features - Add py.typed for PEP 561 compliance ([#​976](https://www.togithub.com/googleapis/python-bigquery/issues/976)) ([96e6bee](https://www.github.com/googleapis/python-bigquery/commit/96e6beef3c63b663b7e5879b1458f4dd1a47a5b5)) - include key metadata in Job representation ([#​964](https://www.togithub.com/googleapis/python-bigquery/issues/964)) ([acca1cb](https://www.github.com/googleapis/python-bigquery/commit/acca1cb7baaa3b00508246c994ade40314d421c3)) ##### Bug Fixes - Arrow extension-type metadata was not set when calling the REST API or when there are no rows ([#​946](https://www.togithub.com/googleapis/python-bigquery/issues/946)) ([864383b](https://www.github.com/googleapis/python-bigquery/commit/864383bc01636b3774f7da194587b8b7edd0383d)) - disambiguate missing policy tags from explicitly unset policy tags ([#​983](https://www.togithub.com/googleapis/python-bigquery/issues/983)) ([f83c00a](https://www.github.com/googleapis/python-bigquery/commit/f83c00acead70fc0ce9959eefb133a672d816277)) - remove default timeout ([#​974](https://www.togithub.com/googleapis/python-bigquery/issues/974)) ([1cef0d4](https://www.github.com/googleapis/python-bigquery/commit/1cef0d4664bf448168b26487a71795144b7f4d6b)) ##### Documentation - simplify destination table sample with f-strings ([#​966](https://www.togithub.com/googleapis/python-bigquery/issues/966)) ([ab6e76f](https://www.github.com/googleapis/python-bigquery/commit/ab6e76f9489262fd9c1876a1c4f93d7e139aa999))
googleapis/python-bigquery-storage ### [`v2.8.0`](https://togithub.com/googleapis/python-bigquery-storage/blob/master/CHANGELOG.md#​280-httpswwwgithubcomgoogleapispython-bigquery-storagecomparev270v280-2021-09-10) [Compare Source](https://togithub.com/googleapis/python-bigquery-storage/compare/v2.7.0...v2.8.0) ##### Features - add `AppendRowsStream` helper to append rows with a `BigQueryWriteClient` ([#​284](https://www.togithub.com/googleapis/python-bigquery-storage/issues/284)) ([2461f63](https://www.github.com/googleapis/python-bigquery-storage/commit/2461f63d37f707c2d634a95d87b8ffc3e4af3686))
googleapis/python-test-utils ### [`v1.1.0`](https://togithub.com/googleapis/python-test-utils/blob/master/CHANGELOG.md#​110-httpswwwgithubcomgoogleapispython-test-utilscomparev100v110-2021-08-30) [Compare Source](https://togithub.com/googleapis/python-test-utils/compare/v1.0.0...v1.1.0) ##### Features - add 'orchestrate' module ([#​54](https://www.togithub.com/googleapis/python-test-utils/issues/54)) ([ae3da1a](https://www.github.com/googleapis/python-test-utils/commit/ae3da1ab4e7cbf268d6dce60cb467ca7ed6c2c89))
pytest-dev/pytest ### [`v6.2.5`](https://togithub.com/pytest-dev/pytest/releases/6.2.5) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.4...6.2.5) # pytest 6.2.5 (2021-08-29) ## Trivial/Internal Changes - [#​8494](https://togithub.com/pytest-dev/pytest/issues/8494): Python 3.10 is now supported. - [#​9040](https://togithub.com/pytest-dev/pytest/issues/9040): Enable compatibility with `pluggy 1.0` or later.
tqdm/tqdm ### [`v4.62.3`](https://togithub.com/tqdm/tqdm/releases/v4.62.3) [Compare Source](https://togithub.com/tqdm/tqdm/compare/v4.62.2...v4.62.3) - fix minor typo ([#​1246](https://togithub.com/tqdm/tqdm/issues/1246)) - minor example fix ([#​1246](https://togithub.com/tqdm/tqdm/issues/1246)) - misc tidying & refactoring - misc build/dev framework updates - update dependencies - update linters - update docs deployment branches - misc test/ci updates - test forks - tidy OS & Python version tests - bump primary python version 3.7 => 3.8 - beta py3.10 testing - fix py2.7 tests - better timeout handling ### [`v4.62.2`](https://togithub.com/tqdm/tqdm/releases/v4.62.2) [Compare Source](https://togithub.com/tqdm/tqdm/compare/v4.62.1...v4.62.2) - fix notebook memory leak ([#​1216](https://togithub.com/tqdm/tqdm/issues/1216)) - fix `contrib.concurrent` with generators ([#​1233](https://togithub.com/tqdm/tqdm/issues/1233) <- [#​1231](https://togithub.com/tqdm/tqdm/issues/1231))
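A note on the `tqdm` pin in `bigquery_storage/to_dataframe/requirements.txt`: the BigQuery client can report download progress through tqdm when materializing query results as a DataFrame. A minimal sketch, assuming default application credentials and reusing the public table from the quickstart:

```python
from google.cloud import bigquery

client = bigquery.Client()

# progress_bar_type="tqdm" asks google-cloud-bigquery to render a tqdm
# progress bar while result pages download; it requires tqdm to be
# installed, hence the pinned dependency in the to_dataframe sample.
df = client.query(
    "SELECT name, state"
    " FROM `bigquery-public-data.usa_names.usa_1910_current`"
    " LIMIT 1000"
).to_dataframe(progress_bar_type="tqdm")

print(df.head())
```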
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-bigquery-storage). --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 4 ++-- bigquery_storage/snippets/requirements.txt | 6 +++--- bigquery_storage/to_dataframe/requirements.txt | 8 ++++---- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 37ab479593a..0891ed5ae9a 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.7.0 +google-cloud-bigquery-storage==2.8.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 85597665d21..3bb560ddf8b 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.0.0 -pytest==6.2.4 +google-cloud-testutils==1.1.0 +pytest==6.2.5 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 8df538bbcc2..858829aef90 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.6.2 -google-cloud-bigquery==2.24.1 -protobuf==3.17.3 +google-cloud-bigquery-storage==2.8.0 +google-cloud-bigquery==2.27.0 +protobuf==3.18.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 063a02b258a..1ca261ea7a7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,9 +1,9 @@ -google-auth==2.0.2 -google-cloud-bigquery-storage==2.7.0 -google-cloud-bigquery==2.26.0 +google-auth==2.2.0 +google-cloud-bigquery-storage==2.8.0 +google-cloud-bigquery==2.27.0 pyarrow==5.0.0 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' pandas==1.2.5; python_version > '3.6' pandas==1.1.5; python_version <= '3.6' -tqdm==4.62.1 +tqdm==4.62.3 From 4e0b763d92daef417b51ab5bf3495caec9bd59cd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 28 Sep 2021 17:18:37 +0200 Subject: [PATCH 124/338] chore(deps): update all dependencies (#313) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 0891ed5ae9a..a3ecaa6ebea 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.8.0 +google-cloud-bigquery-storage==2.9.0 diff --git 
a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 858829aef90..3c17a14ccdc 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.8.0 -google-cloud-bigquery==2.27.0 +google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery==2.27.1 protobuf==3.18.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 1ca261ea7a7..39ba5dbf3c3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.2.0 -google-cloud-bigquery-storage==2.8.0 -google-cloud-bigquery==2.27.0 +google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery==2.27.1 pyarrow==5.0.0 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From 666dfe6d566a41fc2110927b0ef77fb8b0ea43ce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 29 Sep 2021 20:32:31 +0200 Subject: [PATCH 125/338] chore(deps): update dependency google-auth to v2.2.1 (#314) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 39ba5dbf3c3..72809a371ac 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.2.0 +google-auth==2.2.1 google-cloud-bigquery-storage==2.9.0 google-cloud-bigquery==2.27.1 pyarrow==5.0.0 From 3e4f7a576605d27151ad814794cac095265f17e9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 16:56:11 +0000 Subject: [PATCH 126/338] chore: fail samples nox session if python version is missing (#316) --- bigquery_storage/quickstart/noxfile.py | 4 ++++ bigquery_storage/snippets/noxfile.py | 4 ++++ bigquery_storage/to_dataframe/noxfile.py | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index b008613f03f..1fd8956fbf0 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index b008613f03f..1fd8956fbf0 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index b008613f03f..1fd8956fbf0 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # From 05be3c228be033ef37661c885eef6b8b2453c89b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 1 Oct 2021 12:18:50 +0200 Subject: [PATCH 127/338] chore(deps): update dependency 
google-cloud-bigquery to v2.28.0 (#317) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 3c17a14ccdc..09bdb5eeed0 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.9.0 -google-cloud-bigquery==2.27.1 +google-cloud-bigquery==2.28.0 protobuf==3.18.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 72809a371ac..aa993c31335 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.2.1 google-cloud-bigquery-storage==2.9.0 -google-cloud-bigquery==2.27.1 +google-cloud-bigquery==2.28.0 pyarrow==5.0.0 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From 898e8629bf5691493d1f3a79a79500823315ff8f Mon Sep 17 00:00:00 2001 From: Veronica Wasson <3992422+VeronicaWasson@users.noreply.github.com> Date: Tue, 5 Oct 2021 14:06:12 -0700 Subject: [PATCH 128/338] docs(samples): Add minimal sample to show Write API in pending mode (#322) This sample is a stripped down version of the bigquerystorage_append_rows_raw_proto2 sample, for embedding in the Write API documentation. The docs would then link to the longer sample which shows how to format all of the datatypes including STRUCT types. btw I registered a new region tag for this snippet --- .../snippets/append_rows_pending.py | 134 ++++++++++++++++++ .../snippets/append_rows_pending_test.py | 73 ++++++++++ .../snippets/customer_record.proto | 28 ++++ .../snippets/customer_record_pb2.py | 99 +++++++++++++ .../snippets/customer_record_schema.json | 11 ++ 5 files changed, 345 insertions(+) create mode 100644 bigquery_storage/snippets/append_rows_pending.py create mode 100644 bigquery_storage/snippets/append_rows_pending_test.py create mode 100644 bigquery_storage/snippets/customer_record.proto create mode 100644 bigquery_storage/snippets/customer_record_pb2.py create mode 100644 bigquery_storage/snippets/customer_record_schema.json diff --git a/bigquery_storage/snippets/append_rows_pending.py b/bigquery_storage/snippets/append_rows_pending.py new file mode 100644 index 00000000000..af780ffa5b9 --- /dev/null +++ b/bigquery_storage/snippets/append_rows_pending.py @@ -0,0 +1,134 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START bigquerystorage_append_rows_pending] +""" +This code sample demonstrates how to write records in pending mode +using the low-level generated client for Python. 
+""" + +from google.cloud import bigquery_storage_v1 +from google.cloud.bigquery_storage_v1 import types +from google.cloud.bigquery_storage_v1 import writer +from google.protobuf import descriptor_pb2 + +# If you update the customer_record.proto protocol buffer definition, run: +# +# protoc --python_out=. customer_record.proto +# +# from the samples/snippets directory to generate the customer_record_pb2.py module. +from . import customer_record_pb2 + + +def create_row_data(row_num: int, name: str): + row = customer_record_pb2.CustomerRecord() + row.row_num = row_num + row.customer_name = name + return row.SerializeToString() + + +def append_rows_pending(project_id: str, dataset_id: str, table_id: str): + + """Create a write stream, write some sample data, and commit the stream.""" + write_client = bigquery_storage_v1.BigQueryWriteClient() + parent = write_client.table_path(project_id, dataset_id, table_id) + write_stream = types.WriteStream() + + # When creating the stream, choose the type. Use the PENDING type to wait + # until the stream is committed before it is visible. See: + # https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#google.cloud.bigquery.storage.v1.WriteStream.Type + write_stream.type_ = types.WriteStream.Type.PENDING + write_stream = write_client.create_write_stream( + parent=parent, write_stream=write_stream + ) + stream_name = write_stream.name + + # Create a template with fields needed for the first request. + request_template = types.AppendRowsRequest() + + # The initial request must contain the stream name. + request_template.write_stream = stream_name + + # So that BigQuery knows how to parse the serialized_rows, generate a + # protocol buffer representation of your message descriptor. + proto_schema = types.ProtoSchema() + proto_descriptor = descriptor_pb2.DescriptorProto() + customer_record_pb2.CustomerRecord.DESCRIPTOR.CopyToProto(proto_descriptor) + proto_schema.proto_descriptor = proto_descriptor + proto_data = types.AppendRowsRequest.ProtoData() + proto_data.writer_schema = proto_schema + request_template.proto_rows = proto_data + + # Some stream types support an unbounded number of requests. Construct an + # AppendRowsStream to send an arbitrary number of requests to a stream. + append_rows_stream = writer.AppendRowsStream(write_client, request_template) + + # Create a batch of row data by appending proto2 serialized bytes to the + # serialized_rows repeated field. + proto_rows = types.ProtoRows() + proto_rows.serialized_rows.append(create_row_data(1, "Alice")) + proto_rows.serialized_rows.append(create_row_data(2, "Bob")) + + # Set an offset to allow resuming this stream if the connection breaks. + # Keep track of which requests the server has acknowledged and resume the + # stream at the first non-acknowledged message. If the server has already + # processed a message with that offset, it will return an ALREADY_EXISTS + # error, which can be safely ignored. + # + # The first request must always have an offset of 0. + request = types.AppendRowsRequest() + request.offset = 0 + proto_data = types.AppendRowsRequest.ProtoData() + proto_data.rows = proto_rows + request.proto_rows = proto_data + + response_future_1 = append_rows_stream.send(request) + + # Send another batch. + proto_rows = types.ProtoRows() + proto_rows.serialized_rows.append(create_row_data(3, "Charles")) + + # Since this is the second request, you only need to include the row data. 
+ # The stream name and the protocol buffer DESCRIPTOR are only needed in + # the first request. + request = types.AppendRowsRequest() + proto_data = types.AppendRowsRequest.ProtoData() + proto_data.rows = proto_rows + request.proto_rows = proto_data + + # Offset must equal the number of rows that were previously sent. + request.offset = 2 + + response_future_2 = append_rows_stream.send(request) + + print(response_future_1.result()) + print(response_future_2.result()) + + # Shut down background threads and close the streaming connection. + append_rows_stream.close() + + # A PENDING type stream must be "finalized" before being committed. No new + # records can be written to the stream after this method has been called. + write_client.finalize_write_stream(name=write_stream.name) + + # Commit the stream you created earlier. + batch_commit_write_streams_request = types.BatchCommitWriteStreamsRequest() + batch_commit_write_streams_request.parent = parent + batch_commit_write_streams_request.write_streams = [write_stream.name] + write_client.batch_commit_write_streams(batch_commit_write_streams_request) + + print(f"Writes to stream: '{write_stream.name}' have been committed.") + + +# [END bigquerystorage_append_rows_pending] diff --git a/bigquery_storage/snippets/append_rows_pending_test.py b/bigquery_storage/snippets/append_rows_pending_test.py new file mode 100644 index 00000000000..dc0e690d61c --- /dev/null +++ b/bigquery_storage/snippets/append_rows_pending_test.py @@ -0,0 +1,73 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pathlib +import random + +from google.cloud import bigquery +import pytest + +from . import append_rows_pending + + +DIR = pathlib.Path(__file__).parent + + +regions = ["US", "non-US"] + + +@pytest.fixture(params=regions) +def sample_data_table( + request: pytest.FixtureRequest, + bigquery_client: bigquery.Client, + project_id: str, + dataset_id: str, + dataset_id_non_us: str, +) -> str: + dataset = dataset_id + if request.param != "US": + dataset = dataset_id_non_us + schema = bigquery_client.schema_from_json(str(DIR / "customer_record_schema.json")) + table_id = f"append_rows_proto2_{random.randrange(10000)}" + full_table_id = f"{project_id}.{dataset}.{table_id}" + table = bigquery.Table(full_table_id, schema=schema) + table = bigquery_client.create_table(table, exists_ok=True) + yield full_table_id + bigquery_client.delete_table(table, not_found_ok=True) + + +def test_append_rows_pending( + capsys: pytest.CaptureFixture, + bigquery_client: bigquery.Client, + sample_data_table: str, +): + project_id, dataset_id, table_id = sample_data_table.split(".") + append_rows_pending.append_rows_pending( + project_id=project_id, dataset_id=dataset_id, table_id=table_id + ) + out, _ = capsys.readouterr() + assert "have been committed" in out + + rows = bigquery_client.query( + f"SELECT * FROM `{project_id}.{dataset_id}.{table_id}`" + ).result() + row_items = [ + # Convert to sorted tuple of items to more easily search for expected rows.
+ tuple(sorted(row.items())) + for row in rows + ] + + assert (("customer_name", "Alice"), ("row_num", 1)) in row_items + assert (("customer_name", "Bob"), ("row_num", 2)) in row_items + assert (("customer_name", "Charles"), ("row_num", 3)) in row_items diff --git a/bigquery_storage/snippets/customer_record.proto b/bigquery_storage/snippets/customer_record.proto new file mode 100644 index 00000000000..06142c3b2e2 --- /dev/null +++ b/bigquery_storage/snippets/customer_record.proto @@ -0,0 +1,28 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The BigQuery Storage API expects protocol buffer data to be encoded in the +// proto2 wire format. This allows it to disambiguate missing optional fields +// from default values without the need for wrapper types. +syntax = "proto2"; + +// Define a message type representing the rows in your table. The message +// cannot contain fields which are not present in the table. +message CustomerRecord { + + optional string customer_name = 1; + + // Use the required keyword for client-side validation of required fields. + required int64 row_num = 2; +} diff --git a/bigquery_storage/snippets/customer_record_pb2.py b/bigquery_storage/snippets/customer_record_pb2.py new file mode 100644 index 00000000000..14201ea912f --- /dev/null +++ b/bigquery_storage/snippets/customer_record_pb2.py @@ -0,0 +1,99 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: customer_record.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="customer_record.proto", + package="", + syntax="proto2", + serialized_options=None, + serialized_pb=_b( + '\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03' + ), +) + + +_CUSTOMERRECORD = _descriptor.Descriptor( + name="CustomerRecord", + full_name="CustomerRecord", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="customer_name", + full_name="CustomerRecord.customer_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="row_num", + full_name="CustomerRecord.row_num", + index=1, + number=2, + type=3, + cpp_type=2, + label=2, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto2", + extension_ranges=[], + oneofs=[], + serialized_start=25, + serialized_end=81, +) + +DESCRIPTOR.message_types_by_name["CustomerRecord"] = _CUSTOMERRECORD +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CustomerRecord = _reflection.GeneratedProtocolMessageType( + "CustomerRecord", + (_message.Message,), + dict( + DESCRIPTOR=_CUSTOMERRECORD, + __module__="customer_record_pb2" + # @@protoc_insertion_point(class_scope:CustomerRecord) + ), +) +_sym_db.RegisterMessage(CustomerRecord) + + +# @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/snippets/customer_record_schema.json b/bigquery_storage/snippets/customer_record_schema.json new file mode 100644 index 00000000000..e04b31a7ead --- /dev/null +++ b/bigquery_storage/snippets/customer_record_schema.json @@ -0,0 +1,11 @@ +[ + { + "name": "customer_name", + "type": "STRING" + }, + { + "name": "row_num", + "type": "INTEGER", + "mode": "REQUIRED" + } +] From 7f8e2ce2be60b85ef961e9edb315b2980a0b4092 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 6 Oct 2021 16:31:04 +0200 Subject: [PATCH 129/338] chore(deps): update dependency protobuf to v3.18.1 (#324) --- bigquery_storage/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 09bdb5eeed0..a357f80597c 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.9.0 google-cloud-bigquery==2.28.0 -protobuf==3.18.0 +protobuf==3.18.1 From d29e0397fd357b97e27c758576ef7368c422c0b3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 7 Oct 2021 15:57:48 +0200 Subject: [PATCH 130/338] chore(deps): 
update dependency google-cloud-bigquery-storage to v2.9.1 (#325) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index a3ecaa6ebea..ce53f8ed404 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery-storage==2.9.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index a357f80597c..dbd83da5fc8 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery-storage==2.9.1 google-cloud-bigquery==2.28.0 protobuf==3.18.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index aa993c31335..c8cd8616077 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.2.1 -google-cloud-bigquery-storage==2.9.0 +google-cloud-bigquery-storage==2.9.1 google-cloud-bigquery==2.28.0 pyarrow==5.0.0 ipython==7.24.0; python_version > '3.6' From 44fb71b0397a466ddaff222d1b1d66d6bf2e65ec Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 12 Oct 2021 20:21:56 +0200 Subject: [PATCH 131/338] chore(deps): update all dependencies (#327) Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index dbd83da5fc8..911b1b1c814 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.9.1 -google-cloud-bigquery==2.28.0 +google-cloud-bigquery==2.28.1 protobuf==3.18.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c8cd8616077..07ff6279542 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ -google-auth==2.2.1 +google-auth==2.3.0 google-cloud-bigquery-storage==2.9.1 -google-cloud-bigquery==2.28.0 +google-cloud-bigquery==2.28.1 pyarrow==5.0.0 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From c8c15b2d238d39dfec3c5eefc880fc1dc1a81d34 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Nov 2021 10:26:33 +0100 Subject: [PATCH 132/338] chore(deps): update all dependencies (#335) Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 3bb560ddf8b..6bf5f5ed947 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.1.0 +google-cloud-testutils==1.2.0 pytest==6.2.5 diff --git 
a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 911b1b1c814..42a3a0152b8 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.9.1 -google-cloud-bigquery==2.28.1 -protobuf==3.18.1 +google-cloud-bigquery==2.29.0 +protobuf==3.19.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 07ff6279542..738a00a75c1 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ -google-auth==2.3.0 +google-auth==2.3.2 google-cloud-bigquery-storage==2.9.1 -google-cloud-bigquery==2.28.1 -pyarrow==5.0.0 +google-cloud-bigquery==2.29.0 +pyarrow==6.0.0 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' pandas==1.2.5; python_version > '3.6' From d572487a1659e642511b136a0983ed35a7d554da Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 5 Nov 2021 05:55:44 -0400 Subject: [PATCH 133/338] chore(python): fix formatting issue in noxfile.py.j2 (#331) Source-Link: https://github.com/googleapis/synthtool/commit/0e85ed6ccf43fb433c03551205c9a186a2da1d4c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:424d88d5d08ddd955782a4359559dc536e658db1a77416c9a4fff79df9519ad2 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/noxfile.py | 2 +- bigquery_storage/snippets/noxfile.py | 2 +- bigquery_storage/to_dataframe/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 1fd8956fbf0..93a9122cc45 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 1fd8956fbf0..93a9122cc45 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 1fd8956fbf0..93a9122cc45 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 0c76740b380dfd159ab8fc8177050e4fec11b302 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Nov 2021 16:26:13 +0100 Subject: [PATCH 134/338] chore(deps): update all dependencies (#344) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 42a3a0152b8..8fb62b129c5 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.9.1 -google-cloud-bigquery==2.29.0 +google-cloud-bigquery==2.30.1 protobuf==3.19.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 738a00a75c1..6e94cf7f9ba 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ -google-auth==2.3.2 +google-auth==2.3.3 google-cloud-bigquery-storage==2.9.1 -google-cloud-bigquery==2.29.0 +google-cloud-bigquery==2.30.1 pyarrow==6.0.0 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' From da4c757a21fea9502dae69ad74f83fa25624a76a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 8 Nov 2021 20:28:36 +0100 Subject: [PATCH 135/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.10.0 (#347) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index ce53f8ed404..8a2ac6e044f 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.9.1 +google-cloud-bigquery-storage==2.10.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 8fb62b129c5..eb4c3460b1d 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.9.1 +google-cloud-bigquery-storage==2.10.0 google-cloud-bigquery==2.30.1 protobuf==3.19.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6e94cf7f9ba..6bc141a656c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.3.3 -google-cloud-bigquery-storage==2.9.1 +google-cloud-bigquery-storage==2.10.0 google-cloud-bigquery==2.30.1 pyarrow==6.0.0 ipython==7.24.0; python_version > '3.6' From 8afdf7a42b0f07582e4844be5fca0c9fa2205596 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Nov 2021 18:19:06 +0100 Subject: [PATCH 136/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.10.1 (#353) Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements.txt | 2 +- 
bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 8a2ac6e044f..27d2f18adfe 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.10.0 +google-cloud-bigquery-storage==2.10.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index eb4c3460b1d..5c612ffc240 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.10.0 +google-cloud-bigquery-storage==2.10.1 google-cloud-bigquery==2.30.1 protobuf==3.19.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6bc141a656c..3019630c83c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.3.3 -google-cloud-bigquery-storage==2.10.0 +google-cloud-bigquery-storage==2.10.1 google-cloud-bigquery==2.30.1 pyarrow==6.0.0 ipython==7.24.0; python_version > '3.6' From 6dd56154fd8cbc568f24eb158aa1723457d13cae Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 18 Nov 2021 16:41:06 +0100 Subject: [PATCH 137/338] chore(deps): update dependency pyarrow to v6.0.1 (#357) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 3019630c83c..009a6864187 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==2.3.3 google-cloud-bigquery-storage==2.10.1 google-cloud-bigquery==2.30.1 -pyarrow==6.0.0 +pyarrow==6.0.1 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6' pandas==1.2.5; python_version > '3.6' From 59fc46072cb3dc4459ef1e5c0b362c890514ba60 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Dec 2021 11:54:01 +0100 Subject: [PATCH 138/338] chore(deps): update all dependencies (#358) --- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 6bf5f5ed947..383d41f0a81 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.2.0 +google-cloud-testutils==1.3.0 pytest==6.2.5 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 5c612ffc240..6586c02e740 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.10.1 -google-cloud-bigquery==2.30.1 +google-cloud-bigquery==2.31.0 protobuf==3.19.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 009a6864187..c077506cec0 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 
+1,6 @@ google-auth==2.3.3 google-cloud-bigquery-storage==2.10.1 -google-cloud-bigquery==2.30.1 +google-cloud-bigquery==2.31.0 pyarrow==6.0.1 ipython==7.24.0; python_version > '3.6' ipython==7.16.1; python_version <= '3.6'
From 85a762d13c20510f975765ef2954930b22c58237 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 9 Dec 2021 15:03:12 -0800 Subject: [PATCH 139/338] chore: update python-docs-samples link to main branch (#362) Source-Link: https://github.com/googleapis/synthtool/commit/0941ef32b18aff0be34a40404f3971d9f51996e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec Co-authored-by: Owl Bot --- bigquery_storage/AUTHORING_GUIDE.md | 2 +- bigquery_storage/CONTRIBUTING.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/AUTHORING_GUIDE.md b/bigquery_storage/AUTHORING_GUIDE.md index 55c97b32f4c..8249522ffc2 100644 --- a/bigquery_storage/AUTHORING_GUIDE.md +++ b/bigquery_storage/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/bigquery_storage/CONTRIBUTING.md b/bigquery_storage/CONTRIBUTING.md index 34c882b6f1a..f5fe2e6baf1 100644 --- a/bigquery_storage/CONTRIBUTING.md +++ b/bigquery_storage/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file
From 47cb3fdf03de0983abc226bdfb6d71e3467689ef Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 Jan 2022 21:18:15 +0100 Subject: [PATCH 140/338] chore(deps): update dependency google-cloud-testutils to v1.3.1 (#359) This PR updates [google-cloud-testutils](https://togithub.com/googleapis/python-test-utils) from `==1.3.0` to `==1.3.1`. Release notes: see the v1.3.1 entry in the [CHANGELOG](https://togithub.com/googleapis/python-test-utils/blob/master/CHANGELOG.md) and [Compare Source](https://togithub.com/googleapis/python-test-utils/compare/v1.3.0...v1.3.1). This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). --- bigquery_storage/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 383d41f0a81..48472e0052a 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.3.0 +google-cloud-testutils==1.3.1 pytest==6.2.5
From ed937bd5910a8097de04067761a1fe15fee5ec1b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 07:41:06 -0500 Subject: [PATCH 141/338] chore(samples): Add check for tests in directory (#377) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 70 +++++++++++++----------- bigquery_storage/snippets/noxfile.py | 70 +++++++++++++----------- bigquery_storage/to_dataframe/noxfile.py | 70 +++++++++++++----------- 3 files changed, 117 insertions(+), 93 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 93a9122cc45..3bbef5d54f4 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded.
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 93a9122cc45..3bbef5d54f4 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 93a9122cc45..3bbef5d54f4 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From ba2c8883dcc4cb26b0005538fcaedbed9ceb04ed Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 20:24:34 -0500 Subject: [PATCH 142/338] chore(python): Noxfile recognizes that tests can live in a folder (#383) Source-Link: https://github.com/googleapis/synthtool/commit/4760d8dce1351d93658cb11d02a1b7ceb23ae5d7 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 1 + bigquery_storage/snippets/noxfile.py | 1 + bigquery_storage/to_dataframe/noxfile.py | 1 + 3 files changed, 3 insertions(+) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 3bbef5d54f4..20cdfc62013 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 3bbef5d54f4..20cdfc62013 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 3bbef5d54f4..20cdfc62013 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: 
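Taken together, patches 141 and 142 above make each per-sample noxfile skip pytest entirely when a directory ships no tests: two filename globs catch test modules sitting next to the sample, and a third glob catches a tests folder. Below is a minimal standalone sketch of that discovery logic; the has_tests helper and its sample_dir argument are illustrative only and not part of the real noxfile.

    import glob
    import os


    def has_tests(sample_dir: str) -> bool:
        """A sample directory has tests if it contains *_test.py or
        test_*.py modules, or (per patch 142) a tests folder."""
        cwd = os.getcwd()
        os.chdir(sample_dir)
        try:
            test_list = glob.glob("*_test.py") + glob.glob("test_*.py")
            # Patch 142 extends the patch-141 check so tests may live in a folder.
            test_list.extend(glob.glob("tests"))
            return len(test_list) > 0
        finally:
            os.chdir(cwd)


    # When has_tests(".") is False, the session prints
    # "No tests found, skipping directory." and returns without running pytest.

The globs can run relative to the working directory because nox executes each session from the folder containing its noxfile.py, so the real _session_tests needs no explicit path handling.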
From c9160e6e225e9706e5a6a1ad8bfadbfc33681d56 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 14 Feb 2022 12:06:32 -0600 Subject: [PATCH 143/338] doc: add region tags to `customer_record.proto` so it can be embedded (#391) In response to customer feedback: customers had trouble finding the module generated from this file. --- bigquery_storage/snippets/customer_record.proto | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bigquery_storage/snippets/customer_record.proto b/bigquery_storage/snippets/customer_record.proto index 06142c3b2e2..6c79336b6fa 100644 --- a/bigquery_storage/snippets/customer_record.proto +++ b/bigquery_storage/snippets/customer_record.proto @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +// [START bigquerystorage_append_rows_pending_customer_record] // The BigQuery Storage API expects protocol buffer data to be encoded in the // proto2 wire format. This allows it to disambiguate missing optional fields // from default values without the need for wrapper types. @@ -26,3 +27,4 @@ message CustomerRecord { // Use the required keyword for client-side validation of required fields. required int64 row_num = 2; } +// [END bigquerystorage_append_rows_pending_customer_record]
From d46da7e55314d33c6c948a2ff34a3353e86a351f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 18 Feb 2022 11:45:35 -0600 Subject: [PATCH 144/338] fix: remove bigquery.readonly auth scope (#394) * chore: use gapic-generator-python 0.63.2 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix!: remove bigquery.readonly auth scope PiperOrigin-RevId: 429331987 Source-Link: https://github.com/googleapis/googleapis/commit/4b2bd2637d6df992933b7437cf64f1c7d7b6ed9b Source-Link: https://github.com/googleapis/googleapis-gen/commit/fa17f03b5ae96316cd02f2997f2fd1196e034e5e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZmExN2YwM2I1YWU5NjMxNmNkMDJmMjk5N2YyZmQxMTk2ZTAzNGU1ZSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md docs: add generated snippets PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 Co-authored-by: Owl Bot Co-authored-by: Tim Swast --- ...ig_query_read_create_read_session_async.py | 44 + ...big_query_read_create_read_session_sync.py | 44 + ...orage_v1_big_query_read_read_rows_async.py | 45 + ...torage_v1_big_query_read_read_rows_sync.py | 45 + ..._big_query_read_split_read_stream_async.py | 44 + ...1_big_query_read_split_read_stream_sync.py | 44 + ...ge_v1_big_query_write_append_rows_async.py | 55 ++ ...age_v1_big_query_write_append_rows_sync.py | 55 ++ ..._write_batch_commit_write_streams_async.py | 47 + ...y_write_batch_commit_write_streams_sync.py | 47 + ...g_query_write_create_write_stream_async.py | 44 + ...ig_query_write_create_write_stream_sync.py | 44 + ...query_write_finalize_write_stream_async.py | 44 + ..._query_write_finalize_write_stream_sync.py | 44 + ...age_v1_big_query_write_flush_rows_async.py | 44 +
...rage_v1_big_query_write_flush_rows_sync.py | 44 + ..._big_query_write_get_write_stream_async.py | 44 + ...1_big_query_write_get_write_stream_sync.py | 44 + ...ig_query_read_create_read_session_async.py | 44 + ...big_query_read_create_read_session_sync.py | 44 + ..._v1beta2_big_query_read_read_rows_async.py | 45 + ...e_v1beta2_big_query_read_read_rows_sync.py | 45 + ..._big_query_read_split_read_stream_async.py | 44 + ...2_big_query_read_split_read_stream_sync.py | 44 + ...beta2_big_query_write_append_rows_async.py | 57 ++ ...1beta2_big_query_write_append_rows_sync.py | 57 ++ ..._write_batch_commit_write_streams_async.py | 47 + ...y_write_batch_commit_write_streams_sync.py | 47 + ...g_query_write_create_write_stream_async.py | 44 + ...ig_query_write_create_write_stream_sync.py | 44 + ...query_write_finalize_write_stream_async.py | 44 + ..._query_write_finalize_write_stream_sync.py | 44 + ...1beta2_big_query_write_flush_rows_async.py | 46 + ...v1beta2_big_query_write_flush_rows_sync.py | 46 + ..._big_query_write_get_write_stream_async.py | 44 + ...2_big_query_write_get_write_stream_sync.py | 44 + .../snippet_metadata_bigquery storage_v1.json | 805 ++++++++++++++++++ ...pet_metadata_bigquery storage_v1beta2.json | 805 ++++++++++++++++++ 38 files changed, 3262 insertions(+) create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py create mode 100644 
bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py create mode 100644 bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json create mode 100644 bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py new file mode 100644 index 00000000000..f6af0cb936b 
--- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = await client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py new file mode 100644 index 00000000000..a9147ca7561 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py new file mode 100644 index 00000000000..790378a27ed --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_read_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = await client.read_rows(request=request) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py new file mode 100644 index 00000000000..bab32a0b42a --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_read_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py new file mode 100644 index 00000000000..104b1ab54d3 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = await client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py new file mode 100644 index 00000000000..2f46b2a61c0 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py new file mode 100644 index 00000000000..9d1040260c7 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_append_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.AppendRowsRequest(write_stream="write_stream_value",) + + # This method expects an iterator which contains + # 'bigquery_storage_v1.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.append_rows(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py new file mode 100644 index 00000000000..40fae91106b --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_append_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.AppendRowsRequest(write_stream="write_stream_value",) + + # This method expects an iterator which contains + # 'bigquery_storage_v1.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py new file mode 100644 index 00000000000..d6fd00aaf94 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = await client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py new file mode 100644 index 00000000000..b8f877bddad --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py new file mode 100644 index 00000000000..fbdb1a395e4 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = await client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py new file mode 100644 index 00000000000..dcd116313eb --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py new file mode 100644 index 00000000000..c3b8abb53a0 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py new file mode 100644 index 00000000000..69c14a0e809 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py new file mode 100644 index 00000000000..6d82da59801 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FlushRowsRequest(write_stream="write_stream_value",) + + # Make the request + response = await client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py new file mode 100644 index 00000000000..15862b8cab3 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.FlushRowsRequest(write_stream="write_stream_value",) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py new file mode 100644 index 00000000000..274003a98e1 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
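+#
+# Besides application-created streams, every table exposes a special
+# "_default" stream that can be inspected the same way, e.g. (hypothetical
+# project, dataset, and table names):
+#
+#   projects/my-project/datasets/my_dataset/tables/my_table/streams/_default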
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async] +from google.cloud import bigquery_storage_v1 + + +async def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py new file mode 100644 index 00000000000..167273695ef --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync] +from google.cloud import bigquery_storage_v1 + + +def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py new file mode 100644 index 00000000000..f16b850fd86 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = await client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py new file mode 100644 index 00000000000..bbc936dfa20 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReadSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
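+#
+# A sketch of a more realistic request, assuming the v1beta2 surface
+# mirrors v1 (a parent project plus a ReadSession naming the table; all
+# identifiers hypothetical):
+#
+#   request = bigquery_storage_v1beta2.CreateReadSessionRequest(
+#       parent="projects/my-project",
+#       read_session=bigquery_storage_v1beta2.ReadSession(
+#           table="projects/my-project/datasets/my_dataset/tables/my_table",
+#       ),
+#       max_stream_count=1,
+#   )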
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_create_read_session(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateReadSessionRequest(parent="parent_value",) + + # Make the request + response = client.create_read_session(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py new file mode 100644 index 00000000000..3b13b97ad72 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_read_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = await client.read_rows(request=request) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py new file mode 100644 index 00000000000..9dd12df484f --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReadRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_read_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.ReadRowsRequest(read_stream="read_stream_value",) + + # Make the request + stream = client.read_rows(request=request) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py new file mode 100644 index 00000000000..b3c9fbb1d5f --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
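+#
+# Here "name_value" stands for a read stream resource name taken from an
+# existing read session; the request also accepts a fractional split
+# point. A sketch, assuming "session" came from a create_read_session call:
+#
+#   request = bigquery_storage_v1beta2.SplitReadStreamRequest(
+#       name=session.streams[0].name,
+#       fraction=0.5,
+#   )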
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = await client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py new file mode 100644 index 00000000000..0d81457a5cb --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SplitReadStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_split_read_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryReadClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.SplitReadStreamRequest(name="name_value",) + + # Make the request + response = client.split_read_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py new file mode 100644 index 00000000000..bda4f37b947 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_append_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta2.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.append_rows(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py new file mode 100644 index 00000000000..a8f5596d1fb --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AppendRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
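+#
+# A request that sets only "write_stream", as below, is a placeholder: a
+# working AppendRows call also carries serialized row data, and the first
+# request on the stream must include a writer schema. A sketch, assuming
+# a prepared schema and serialized rows:
+#
+#   request.proto_rows = bigquery_storage_v1beta2.AppendRowsRequest.ProtoData(
+#       writer_schema=proto_schema,  # hypothetical ProtoSchema for your rows
+#       rows=proto_rows,             # hypothetical serialized ProtoRows
+#   )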
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_append_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.AppendRowsRequest( + write_stream="write_stream_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta2.AppendRowsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.append_rows(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py new file mode 100644 index 00000000000..dace7ac7abf --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
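+#
+# In this request, "parent" is the table that owns the streams (pattern:
+# projects/{project}/datasets/{dataset}/tables/{table}) and "write_streams"
+# lists the streams to commit atomically; per the write API's semantics,
+# these should be pending-type streams that have already been finalized.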
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = await client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py new file mode 100644 index 00000000000..6113c6b470f --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCommitWriteStreams +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
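+#
+# On success the printed response carries a commit_time; if any listed
+# stream cannot be committed, nothing is committed and the response's
+# stream_errors field describes the failures.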
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_batch_commit_write_streams(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( + parent="parent_value", + write_streams=["write_streams_value_1", "write_streams_value_2"], + ) + + # Make the request + response = client.batch_commit_write_streams(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py new file mode 100644 index 00000000000..7645b9fef00 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
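+#
+# A more realistic request names the parent table and selects a stream
+# type. A sketch (identifiers hypothetical; in the Python library the
+# proto field "type" is exposed as "type_"):
+#
+#   request = bigquery_storage_v1beta2.CreateWriteStreamRequest(
+#       parent="projects/my-project/datasets/my_dataset/tables/my_table",
+#       write_stream=bigquery_storage_v1beta2.WriteStream(
+#           type_=bigquery_storage_v1beta2.WriteStream.Type.PENDING,
+#       ),
+#   )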
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = await client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py new file mode 100644 index 00000000000..d97ca878e0e --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_create_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest(parent="parent_value",) + + # Make the request + response = client.create_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py new file mode 100644 index 00000000000..ca26d51ec3e --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py new file mode 100644 index 00000000000..71501df17a9 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FinalizeWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
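+#
+# Finalizing a stream closes it to further appends; the response printed
+# below reports the stream's final size via its row_count field.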
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_finalize_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(name="name_value",) + + # Make the request + response = client.finalize_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py new file mode 100644 index 00000000000..8fa8bb25f50 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = await client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py new file mode 100644 index 00000000000..569139e24bf --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FlushRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_flush_rows(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.FlushRowsRequest( + write_stream="write_stream_value", + ) + + # Make the request + response = client.flush_rows(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py new file mode 100644 index 00000000000..2ef47a39637 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
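+#
+# The returned WriteStream describes the stream's metadata (its type and
+# create/commit timestamps), which is useful for checking whether a
+# pending stream has already been committed.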
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async] +from google.cloud import bigquery_storage_v1beta2 + + +async def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = await client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py new file mode 100644 index 00000000000..8a0f081a476 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetWriteStream +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync] +from google.cloud import bigquery_storage_v1beta2 + + +def sample_get_write_stream(): + # Create a client + client = bigquery_storage_v1beta2.BigQueryWriteClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta2.GetWriteStreamRequest(name="name_value",) + + # Make the request + response = client.get_write_stream(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync] diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json new file mode 100644 index 00000000000..a47e9843fed --- /dev/null +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json @@ -0,0 +1,805 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py", + 
"regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": 
"BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, 
+ "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + 
"type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json new file mode 100644 index 00000000000..3ea6be83094 --- /dev/null +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json @@ -0,0 +1,805 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py", + "regionTag": 
"bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 
42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + } + }, + "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py", + "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, 
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} From deecfea1f377f2d8954354d6b5fb30d67b7bfec1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 25 Feb 2022 16:28:25 -0500 Subject: [PATCH 145/338] chore: use gapic-generator-python 0.63.4 (#397) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.63.4 chore: fix snippet region tag format chore: fix docstring code block formatting PiperOrigin-RevId: 430730865 Source-Link: https://github.com/googleapis/googleapis/commit/ea5800229f73f94fd7204915a86ed09dcddf429a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca893ff8af25fc7fe001de1405a517d80446ecca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2E4OTNmZjhhZjI1ZmM3ZmUwMDFkZTE0MDVhNTE3ZDgwNDQ2ZWNjYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove redundant samples Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- ...g_query_read_create_read_session_async.py} | 4 +- ...ig_query_read_create_read_session_sync.py} | 4 +- ...nerated_big_query_read_read_rows_async.py} | 4 +- ...enerated_big_query_read_read_rows_sync.py} | 4 +- ...big_query_read_split_read_stream_async.py} | 4 +- ..._big_query_read_split_read_stream_sync.py} | 4 +- ...ated_big_query_write_append_rows_async.py} | 4 +- ...rated_big_query_write_append_rows_sync.py} | 4 +- ...write_batch_commit_write_streams_async.py} | 4 +- ..._write_batch_commit_write_streams_sync.py} | 4 +- ..._query_write_create_write_stream_async.py} | 4 +- ...g_query_write_create_write_stream_sync.py} | 4 +- ...uery_write_finalize_write_stream_async.py} | 4 +- ...query_write_finalize_write_stream_sync.py} | 4 +- ...rated_big_query_write_flush_rows_async.py} | 4 +- ...erated_big_query_write_flush_rows_sync.py} | 4 +- ...big_query_write_get_write_stream_async.py} | 4 +- ..._big_query_write_get_write_stream_sync.py} | 4 +- ...g_query_read_create_read_session_async.py} | 4 +- ...ig_query_read_create_read_session_sync.py} | 4 +- ...nerated_big_query_read_read_rows_async.py} | 4 +- ...enerated_big_query_read_read_rows_sync.py} | 4 +- ...big_query_read_split_read_stream_async.py} | 4 +- ..._big_query_read_split_read_stream_sync.py} | 4 +- ...ated_big_query_write_append_rows_async.py} | 4 +- ...rated_big_query_write_append_rows_sync.py} | 4 +- ...write_batch_commit_write_streams_async.py} | 4 +- ..._write_batch_commit_write_streams_sync.py} | 4 +- ..._query_write_create_write_stream_async.py} | 4 +- ...g_query_write_create_write_stream_sync.py} | 4 +- ...uery_write_finalize_write_stream_async.py} | 4 +- ...query_write_finalize_write_stream_sync.py} | 4 +- ...rated_big_query_write_flush_rows_async.py} | 4 +- ...erated_big_query_write_flush_rows_sync.py} | 4 +- ...big_query_write_get_write_stream_async.py} | 4 +- ..._big_query_write_get_write_stream_sync.py} | 4 +- .../snippet_metadata_bigquery storage_v1.json | 72 +++++++++---------- ...pet_metadata_bigquery storage_v1beta2.json | 72 +++++++++---------- 38 files changed, 144 insertions(+), 144 deletions(-) rename 
bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py => bigquerystorage_v1_generated_big_query_read_create_read_session_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py => bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py => bigquerystorage_v1_generated_big_query_read_read_rows_async.py} (89%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py => bigquerystorage_v1_generated_big_query_read_read_rows_sync.py} (89%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py => bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py => bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py => bigquerystorage_v1_generated_big_query_write_append_rows_async.py} (90%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py => bigquerystorage_v1_generated_big_query_write_append_rows_sync.py} (90%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py => bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py => bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py => bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py => bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py => bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py => bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py => bigquerystorage_v1_generated_big_query_write_flush_rows_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py => bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py => 
bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py => bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py => bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py => bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py => bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py => bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py => bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py => bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py => bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py} (90%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py => bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py} (90%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py => bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py => bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py => bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py => bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py => bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py => bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py => 
bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py => bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py} (88%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py => bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py} (87%) rename bigquery_storage/generated_samples/{bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py => bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py} (87%) diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index f6af0cb936b..d7e44668b83 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async] +# [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ async def sample_create_read_session(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async] +# [END bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index a9147ca7561..ed4df7acf1f 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync] +# [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ def sample_create_read_session(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync] +# [END bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py 
b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py similarity index 89% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index 790378a27ed..1fc52c6c890 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async] +# [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] from google.cloud import bigquery_storage_v1 @@ -42,4 +42,4 @@ async def sample_read_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async] +# [END bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py similarity index 89% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index bab32a0b42a..3ce81f92337 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync] +# [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync] from google.cloud import bigquery_storage_v1 @@ -42,4 +42,4 @@ def sample_read_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync] +# [END bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index 104b1ab54d3..ffc55999f7e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async] +# [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ async def sample_split_read_stream(): print(response) -# [END 
bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async] +# [END bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index 2f46b2a61c0..2662245440e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync] +# [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ def sample_split_read_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync] +# [END bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py similarity index 90% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 9d1040260c7..6bf956d5ab0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async] +# [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] from google.cloud import bigquery_storage_v1 @@ -52,4 +52,4 @@ def request_generator(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async] +# [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py similarity index 90% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index 40fae91106b..18950b52d25 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START 
bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync] +# [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync] from google.cloud import bigquery_storage_v1 @@ -52,4 +52,4 @@ def request_generator(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync] +# [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py index d6fd00aaf94..d0dc11c31c8 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async] +# [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] from google.cloud import bigquery_storage_v1 @@ -44,4 +44,4 @@ async def sample_batch_commit_write_streams(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async] +# [END bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py index b8f877bddad..6ae627b4743 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync] +# [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync] from google.cloud import bigquery_storage_v1 @@ -44,4 +44,4 @@ def sample_batch_commit_write_streams(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync] +# [END bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py similarity index 88% rename from 
bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index fbdb1a395e4..78ec9abe0be 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async] +# [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ async def sample_create_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async] +# [END bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index dcd116313eb..4a2a178c4e6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync] +# [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ def sample_create_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync] +# [END bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index c3b8abb53a0..c60042e277d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async] +# [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ async def 
sample_finalize_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async] +# [END bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index 69c14a0e809..338b4797e67 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync] +# [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ def sample_finalize_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync] +# [END bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index 6d82da59801..17e9d914037 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async] +# [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ async def sample_flush_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async] +# [END bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index 15862b8cab3..6908ae7b35c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py +++ 
b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync] +# [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ def sample_flush_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync] +# [END bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index 274003a98e1..c31ab505a8a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async] +# [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ async def sample_get_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async] +# [END bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index 167273695ef..4c78053d037 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync] +# [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync] from google.cloud import bigquery_storage_v1 @@ -41,4 +41,4 @@ def sample_get_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync] +# [END bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py similarity index 87% rename from 
bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index f16b850fd86..303e00f0ada 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async] +# [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ async def sample_create_read_session(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async] +# [END bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index bbc936dfa20..da4715e4d57 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ def sample_create_read_session(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index 3b13b97ad72..5d1b932e955 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async] +# [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async] from google.cloud import bigquery_storage_v1beta2 @@ -42,4 +42,4 
@@ async def sample_read_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async] +# [END bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index 9dd12df484f..fad348d7203 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync] from google.cloud import bigquery_storage_v1beta2 @@ -42,4 +42,4 @@ def sample_read_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index b3c9fbb1d5f..57da868ca53 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async] +# [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ async def sample_split_read_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async] +# [END bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index 0d81457a5cb..095b874ae4d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py +++ 
b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ def sample_split_read_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py similarity index 90% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py index bda4f37b947..743f6f9d2c9 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async] from google.cloud import bigquery_storage_v1beta2 @@ -54,4 +54,4 @@ def request_generator(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py similarity index 90% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py index a8f5596d1fb..b47d4edc484 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync] from google.cloud import bigquery_storage_v1beta2 @@ -54,4 +54,4 @@ def request_generator(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py similarity index 87% 
rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py index dace7ac7abf..6dd7b4abf71 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async] from google.cloud import bigquery_storage_v1beta2 @@ -44,4 +44,4 @@ async def sample_batch_commit_write_streams(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py index 6113c6b470f..c7ec096e5e8 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync] from google.cloud import bigquery_storage_v1beta2 @@ -44,4 +44,4 @@ def sample_batch_commit_write_streams(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index 7645b9fef00..a2f2d8b0ec3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage 
-# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ async def sample_create_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index d97ca878e0e..d53977c1ce2 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ def sample_create_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index ca26d51ec3e..2e99de0ba4d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ async def sample_finalize_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py 
b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index 71501df17a9..cd25035cf4a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ def sample_finalize_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py index 8fa8bb25f50..512bd040af6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async] from google.cloud import bigquery_storage_v1beta2 @@ -43,4 +43,4 @@ async def sample_flush_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py similarity index 88% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py index 569139e24bf..396f0b63439 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync] +# [START 
bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync] from google.cloud import bigquery_storage_v1beta2 @@ -43,4 +43,4 @@ def sample_flush_rows(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index 2ef47a39637..b1d2c64e4a0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ async def sample_get_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py similarity index 87% rename from bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py rename to bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index 8a0f081a476..39d07cf0937 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-bigquery-storage -# [START bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync] +# [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync] from google.cloud import bigquery_storage_v1beta2 @@ -41,4 +41,4 @@ def sample_get_write_stream(): print(response) -# [END bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync] +# [END bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync] diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json index a47e9843fed..7a4af13b766 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json @@ -10,8 +10,8 @@ "shortName": "CreateReadSession" } }, - "file": 
"bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_async", + "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", "segments": [ { "end": 44, @@ -54,8 +54,8 @@ "shortName": "CreateReadSession" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_create_read_session_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_CreateReadSession_sync", + "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync", "segments": [ { "end": 44, @@ -99,8 +99,8 @@ "shortName": "ReadRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_async", + "file": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", "segments": [ { "end": 45, @@ -143,8 +143,8 @@ "shortName": "ReadRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_read_rows_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_ReadRows_sync", + "file": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync", "segments": [ { "end": 45, @@ -188,8 +188,8 @@ "shortName": "SplitReadStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_async", + "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", "segments": [ { "end": 44, @@ -232,8 +232,8 @@ "shortName": "SplitReadStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_read_split_read_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryRead_SplitReadStream_sync", + "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync", "segments": [ { "end": 44, @@ -277,8 +277,8 @@ "shortName": "AppendRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_async", + "file": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", "segments": [ { "end": 55, @@ -321,8 +321,8 @@ "shortName": "AppendRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_append_rows_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_AppendRows_sync", + "file": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync", "segments": [ { "end": 55, @@ -366,8 +366,8 @@ "shortName": "BatchCommitWriteStreams" } }, - "file": 
"bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_async", + "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", "segments": [ { "end": 45, @@ -410,8 +410,8 @@ "shortName": "BatchCommitWriteStreams" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_batch_commit_write_streams_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_BatchCommitWriteStreams_sync", + "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync", "segments": [ { "end": 45, @@ -455,8 +455,8 @@ "shortName": "CreateWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_async", + "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", "segments": [ { "end": 44, @@ -499,8 +499,8 @@ "shortName": "CreateWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_create_write_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_CreateWriteStream_sync", + "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync", "segments": [ { "end": 44, @@ -544,8 +544,8 @@ "shortName": "FinalizeWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_async", + "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", "segments": [ { "end": 44, @@ -588,8 +588,8 @@ "shortName": "FinalizeWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_finalize_write_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FinalizeWriteStream_sync", + "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync", "segments": [ { "end": 44, @@ -633,8 +633,8 @@ "shortName": "FlushRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_async", + "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", "segments": [ { "end": 44, @@ -677,8 +677,8 @@ "shortName": "FlushRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_flush_rows_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_FlushRows_sync", + "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py", + "regionTag": 
"bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync", "segments": [ { "end": 44, @@ -722,8 +722,8 @@ "shortName": "GetWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_async", + "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", "segments": [ { "end": 44, @@ -766,8 +766,8 @@ "shortName": "GetWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1_big_query_write_get_write_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1_BigQueryWrite_GetWriteStream_sync", + "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync", "segments": [ { "end": 44, diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json index 3ea6be83094..09d8681da8b 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json @@ -10,8 +10,8 @@ "shortName": "CreateReadSession" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_async", + "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async", "segments": [ { "end": 44, @@ -54,8 +54,8 @@ "shortName": "CreateReadSession" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_create_read_session_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_CreateReadSession_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync", "segments": [ { "end": 44, @@ -99,8 +99,8 @@ "shortName": "ReadRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_async", + "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async", "segments": [ { "end": 45, @@ -143,8 +143,8 @@ "shortName": "ReadRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_read_rows_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_ReadRows_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync", "segments": [ { "end": 45, @@ -188,8 +188,8 @@ "shortName": "SplitReadStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_async", + "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py", + "regionTag": 
"bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async", "segments": [ { "end": 44, @@ -232,8 +232,8 @@ "shortName": "SplitReadStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_read_split_read_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryRead_SplitReadStream_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync", "segments": [ { "end": 44, @@ -277,8 +277,8 @@ "shortName": "AppendRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_async", + "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async", "segments": [ { "end": 55, @@ -321,8 +321,8 @@ "shortName": "AppendRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_append_rows_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_AppendRows_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync", "segments": [ { "end": 55, @@ -366,8 +366,8 @@ "shortName": "BatchCommitWriteStreams" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_async", + "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async", "segments": [ { "end": 45, @@ -410,8 +410,8 @@ "shortName": "BatchCommitWriteStreams" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_batch_commit_write_streams_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_BatchCommitWriteStreams_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync", "segments": [ { "end": 45, @@ -455,8 +455,8 @@ "shortName": "CreateWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_async", + "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async", "segments": [ { "end": 44, @@ -499,8 +499,8 @@ "shortName": "CreateWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_create_write_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_CreateWriteStream_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync", "segments": [ { "end": 44, @@ -544,8 +544,8 @@ "shortName": "FinalizeWriteStream" } }, - "file": 
"bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_async", + "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async", "segments": [ { "end": 44, @@ -588,8 +588,8 @@ "shortName": "FinalizeWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_finalize_write_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FinalizeWriteStream_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync", "segments": [ { "end": 44, @@ -633,8 +633,8 @@ "shortName": "FlushRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_async", + "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async", "segments": [ { "end": 44, @@ -677,8 +677,8 @@ "shortName": "FlushRows" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_flush_rows_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_FlushRows_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync", "segments": [ { "end": 44, @@ -722,8 +722,8 @@ "shortName": "GetWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_async.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_async", + "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async", "segments": [ { "end": 44, @@ -766,8 +766,8 @@ "shortName": "GetWriteStream" } }, - "file": "bigquerystorage_generated_bigquery_storage_v1beta2_big_query_write_get_write_stream_sync.py", - "regionTag": "bigquerystorage_generated_bigquery_storage_v1beta2_BigQueryWrite_GetWriteStream_sync", + "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync", "segments": [ { "end": 44, From dcb2bb723591571e5e3c0d4f564312f0afefb1e8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Feb 2022 05:51:58 -0500 Subject: [PATCH 146/338] chore: update copyright year to 2022 (#399) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update copyright year to 2022 PiperOrigin-RevId: 431037888 Source-Link: https://github.com/googleapis/googleapis/commit/b3397f5febbf21dfc69b875ddabaf76bee765058 Source-Link: https://github.com/googleapis/googleapis-gen/commit/510b54e1cdefd53173984df16645081308fe897e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTEwYjU0ZTFjZGVmZDUzMTczOTg0ZGYxNjY0NTA4MTMwOGZlODk3ZSJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- ...age_v1_generated_big_query_read_create_read_session_async.py | 2 +- ...rage_v1_generated_big_query_read_create_read_session_sync.py | 2 +- ...gquerystorage_v1_generated_big_query_read_read_rows_async.py | 2 +- ...igquerystorage_v1_generated_big_query_read_read_rows_sync.py | 2 +- ...orage_v1_generated_big_query_read_split_read_stream_async.py | 2 +- ...torage_v1_generated_big_query_read_split_read_stream_sync.py | 2 +- ...erystorage_v1_generated_big_query_write_append_rows_async.py | 2 +- ...uerystorage_v1_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...ge_v1_generated_big_query_write_create_write_stream_async.py | 2 +- ...age_v1_generated_big_query_write_create_write_stream_sync.py | 2 +- ..._v1_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...e_v1_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...uerystorage_v1_generated_big_query_write_flush_rows_async.py | 2 +- ...querystorage_v1_generated_big_query_write_flush_rows_sync.py | 2 +- ...orage_v1_generated_big_query_write_get_write_stream_async.py | 2 +- ...torage_v1_generated_big_query_write_get_write_stream_sync.py | 2 +- ...1beta2_generated_big_query_read_create_read_session_async.py | 2 +- ...v1beta2_generated_big_query_read_create_read_session_sync.py | 2 +- ...ystorage_v1beta2_generated_big_query_read_read_rows_async.py | 2 +- ...rystorage_v1beta2_generated_big_query_read_read_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_read_split_read_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_read_split_read_stream_sync.py | 2 +- ...orage_v1beta2_generated_big_query_write_append_rows_async.py | 2 +- ...torage_v1beta2_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...beta2_generated_big_query_write_create_write_stream_async.py | 2 +- ...1beta2_generated_big_query_write_create_write_stream_sync.py | 2 +- ...ta2_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...eta2_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...torage_v1beta2_generated_big_query_write_flush_rows_async.py | 2 +- ...storage_v1beta2_generated_big_query_write_flush_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_write_get_write_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_write_get_write_stream_sync.py | 2 +- 36 files changed, 36 insertions(+), 36 deletions(-) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index d7e44668b83..ba4cbe4de3d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except 
in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index ed4df7acf1f..22ffda003ee 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index 1fc52c6c890..c17828b7151 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index 3ce81f92337..4f780989f2f 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index ffc55999f7e..7d228fc7baf 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index 2662245440e..4e75570ff18 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 6bf956d5ab0..01b023563d8 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index 18950b52d25..14a89c4b46a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py index d0dc11c31c8..d165b2682ef 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py index 6ae627b4743..ba80de9c918 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index 78ec9abe0be..23201452a15 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index 4a2a178c4e6..5ebc4468ccf 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index c60042e277d..426cfc9226d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index 338b4797e67..f86e15f72e7 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index 17e9d914037..849b6759e46 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index 6908ae7b35c..6881767eb7b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index c31ab505a8a..c00834717f6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index 4c78053d037..e292241b763 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index 303e00f0ada..0890e640e0f 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index da4715e4d57..4c60e7cea4b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index 5d1b932e955..7e5d879d30b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index fad348d7203..4a968671cc7 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index 57da868ca53..6b775ab2dfa 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index 095b874ae4d..6e7a4259d73 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py index 743f6f9d2c9..86d1d70dac3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py index b47d4edc484..706f9d41a89 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py index 6dd7b4abf71..a534cd92ef2 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py index c7ec096e5e8..f31e195d15d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index a2f2d8b0ec3..7e0eb529a5d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index d53977c1ce2..a16af5af719 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index 2e99de0ba4d..c208ac3c2e1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index cd25035cf4a..56c7ef44c31 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py index 512bd040af6..221be051cfb 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py index 396f0b63439..2a018b2bbe0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index b1d2c64e4a0..a84c8fd6f3d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index 39d07cf0937..c6d3050af4c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
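The next patch (147/338) rewrites _session_tests in the three sample noxfiles. It flattens the else-branch nesting into an early return and, as the new behavior, reads the pinned requirements files to decide whether pytest may fan tests out across workers: pytest-parallel takes --workers and --tests-per-worker, while pytest-xdist takes -n, so the template passes whichever flags match the plugin actually pinned. A condensed standalone sketch of that detection logic (the file names follow the diff; the helper function and its use are illustrative, not the noxfile itself):

import os

def concurrency_args():
    # Concatenate whatever requirement pins are present; the noxfile reads
    # requirements.txt and requirements-test.txt the same way.
    packages = ""
    for fname in ("requirements.txt", "requirements-test.txt"):
        if os.path.exists(fname):
            with open(fname) as f:
                packages += f.read()
    # pytest-parallel and pytest-xdist use different CLI flags, so return the
    # arguments matching the plugin that is pinned for this sample directory.
    if "pytest-parallel" in packages:
        return ["--workers", "auto", "--tests-per-worker", "auto"]
    if "pytest-xdist" in packages:
        return ["-n", "auto"]
    return []

# Inside the nox session this would be spliced into the pytest invocation,
# e.g. session.run("pytest", *concurrency_args()).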
From 5057a2a971c0a9efa79dbad886dd3c04a0edfbf2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Mar 2022 14:02:12 -0500 Subject: [PATCH 147/338] chore: Adding support for pytest-xdist and pytest-parallel (#406) Source-Link: https://github.com/googleapis/synthtool/commit/82f5cb283efffe96e1b6cd634738e0e7de2cd90a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/noxfile.py | 78 +++++++++++++----------- bigquery_storage/snippets/noxfile.py | 78 +++++++++++++----------- bigquery_storage/to_dataframe/noxfile.py | 78 +++++++++++++----------- 3 files changed, 132 insertions(+), 102 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 20cdfc62013..85f5836dba3 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 20cdfc62013..85f5836dba3 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 20cdfc62013..85f5836dba3 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From acb40968a628f4b03ebf66b2e58351edea8c190c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 7 Mar 2022 18:28:56 +0100 Subject: [PATCH 148/338] chore(deps): update all dependencies (#409) * chore(deps): update all dependencies * Remove py36 pins * add pin for pandas for python 3.7 Co-authored-by: Anthonios Partheniou --- .../quickstart/requirements-test.txt | 2 +- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 6 +++--- .../to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 17 ++++++++--------- 6 files changed, 15 insertions(+), 16 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 927094516e6..c2845bffbe8 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.5 +pytest==7.0.1 diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 27d2f18adfe..f770a1599e5 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.10.1 +google-cloud-bigquery-storage==2.12.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 48472e0052a..048b49808a9 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.1 -pytest==6.2.5 +pytest==7.0.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 6586c02e740..2f57acf5b24 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ 
-google-cloud-bigquery-storage==2.10.1 -google-cloud-bigquery==2.31.0 -protobuf==3.19.1 +google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery==2.34.1 +protobuf==3.19.4 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 927094516e6..c2845bffbe8 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.5 +pytest==7.0.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c077506cec0..44afe1442fe 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,9 +1,8 @@ -google-auth==2.3.3 -google-cloud-bigquery-storage==2.10.1 -google-cloud-bigquery==2.31.0 -pyarrow==6.0.1 -ipython==7.24.0; python_version > '3.6' -ipython==7.16.1; python_version <= '3.6' -pandas==1.2.5; python_version > '3.6' -pandas==1.1.5; python_version <= '3.6' -tqdm==4.62.3 +google-auth==2.6.0 +google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery==2.34.1 +pyarrow==7.0.0 +ipython==7.31.1 +pandas==1.3.5; python_version == '3.7' +pandas==1.4.1; python_version > '3.7' +tqdm==4.63.0 From 2e7036ac0fb5843025a990805d9ccf45d1182851 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 7 Mar 2022 19:30:50 +0100 Subject: [PATCH 149/338] chore(deps): update all dependencies (#410) * chore(deps): update all dependencies * revert * add pins for ipython for py37 and py38 Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 2f57acf5b24..9ac3d905741 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.12.0 -google-cloud-bigquery==2.34.1 +google-cloud-bigquery==2.34.2 protobuf==3.19.4 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 44afe1442fe..6eaeef49f48 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,8 +1,10 @@ google-auth==2.6.0 google-cloud-bigquery-storage==2.12.0 -google-cloud-bigquery==2.34.1 +google-cloud-bigquery==2.34.2 pyarrow==7.0.0 -ipython==7.31.1 +ipython==7.31.1; python_version == '3.7' +ipython==8.0.1; python_version == '3.8' +ipython==8.1.1; python_version >= '3.9' pandas==1.3.5; python_version == '3.7' pandas==1.4.1; python_version > '3.7' tqdm==4.63.0 From cfae702361661a3aaa6937f5974e85c211b17afd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 29 Mar 2022 00:02:11 +0000 Subject: [PATCH 150/338] chore(python): use black==22.3.0 (#417) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- ..._v1_generated_big_query_read_create_read_session_async.py | 4 +++- ...e_v1_generated_big_query_read_create_read_session_sync.py | 4 +++- ...erystorage_v1_generated_big_query_read_read_rows_async.py | 4 +++- ...uerystorage_v1_generated_big_query_read_read_rows_sync.py | 4 +++- 
...ge_v1_generated_big_query_read_split_read_stream_async.py | 4 +++- ...age_v1_generated_big_query_read_split_read_stream_sync.py | 4 +++- ...storage_v1_generated_big_query_write_append_rows_async.py | 4 +++- ...ystorage_v1_generated_big_query_write_append_rows_sync.py | 4 +++- ...v1_generated_big_query_write_create_write_stream_async.py | 4 +++- ..._v1_generated_big_query_write_create_write_stream_sync.py | 4 +++- ..._generated_big_query_write_finalize_write_stream_async.py | 4 +++- ...1_generated_big_query_write_finalize_write_stream_sync.py | 4 +++- ...ystorage_v1_generated_big_query_write_flush_rows_async.py | 4 +++- ...rystorage_v1_generated_big_query_write_flush_rows_sync.py | 4 +++- ...ge_v1_generated_big_query_write_get_write_stream_async.py | 4 +++- ...age_v1_generated_big_query_write_get_write_stream_sync.py | 4 +++- ...ta2_generated_big_query_read_create_read_session_async.py | 4 +++- ...eta2_generated_big_query_read_create_read_session_sync.py | 4 +++- ...orage_v1beta2_generated_big_query_read_read_rows_async.py | 4 +++- ...torage_v1beta2_generated_big_query_read_read_rows_sync.py | 4 +++- ...beta2_generated_big_query_read_split_read_stream_async.py | 4 +++- ...1beta2_generated_big_query_read_split_read_stream_sync.py | 4 +++- ...a2_generated_big_query_write_create_write_stream_async.py | 4 +++- ...ta2_generated_big_query_write_create_write_stream_sync.py | 4 +++- ..._generated_big_query_write_finalize_write_stream_async.py | 4 +++- ...2_generated_big_query_write_finalize_write_stream_sync.py | 4 +++- ...beta2_generated_big_query_write_get_write_stream_async.py | 4 +++- ...1beta2_generated_big_query_write_get_write_stream_sync.py | 4 +++- bigquery_storage/quickstart/noxfile.py | 4 ++-- bigquery_storage/snippets/append_rows_proto2_test.py | 5 ++++- bigquery_storage/snippets/noxfile.py | 4 ++-- bigquery_storage/snippets/sample_data_pb2.py | 4 +++- bigquery_storage/to_dataframe/noxfile.py | 4 ++-- bigquery_storage/to_dataframe/read_table_bqstorage.py | 4 +++- 34 files changed, 100 insertions(+), 37 deletions(-) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index ba4cbe4de3d..a8e43397c7e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -32,7 +32,9 @@ async def sample_create_read_session(): client = bigquery_storage_v1.BigQueryReadAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.CreateReadSessionRequest(parent="parent_value",) + request = bigquery_storage_v1.CreateReadSessionRequest( + parent="parent_value", + ) # Make the request response = await client.create_read_session(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index 22ffda003ee..f38b679c375 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -32,7 +32,9 @@ def sample_create_read_session(): client = bigquery_storage_v1.BigQueryReadClient() # 
Initialize request argument(s) - request = bigquery_storage_v1.CreateReadSessionRequest(parent="parent_value",) + request = bigquery_storage_v1.CreateReadSessionRequest( + parent="parent_value", + ) # Make the request response = client.create_read_session(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index c17828b7151..5ca0fd07a94 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -32,7 +32,9 @@ async def sample_read_rows(): client = bigquery_storage_v1.BigQueryReadAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.ReadRowsRequest(read_stream="read_stream_value",) + request = bigquery_storage_v1.ReadRowsRequest( + read_stream="read_stream_value", + ) # Make the request stream = await client.read_rows(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index 4f780989f2f..1b8dca04047 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -32,7 +32,9 @@ def sample_read_rows(): client = bigquery_storage_v1.BigQueryReadClient() # Initialize request argument(s) - request = bigquery_storage_v1.ReadRowsRequest(read_stream="read_stream_value",) + request = bigquery_storage_v1.ReadRowsRequest( + read_stream="read_stream_value", + ) # Make the request stream = client.read_rows(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index 7d228fc7baf..f58e887d201 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -32,7 +32,9 @@ async def sample_split_read_stream(): client = bigquery_storage_v1.BigQueryReadAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.SplitReadStreamRequest(name="name_value",) + request = bigquery_storage_v1.SplitReadStreamRequest( + name="name_value", + ) # Make the request response = await client.split_read_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index 4e75570ff18..6a44fc92afd 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -32,7 +32,9 @@ def sample_split_read_stream(): client = bigquery_storage_v1.BigQueryReadClient() # Initialize request argument(s) - request = bigquery_storage_v1.SplitReadStreamRequest(name="name_value",) + request = bigquery_storage_v1.SplitReadStreamRequest( + name="name_value", + ) 
# Make the request response = client.split_read_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 01b023563d8..0e11ca4b473 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -32,7 +32,9 @@ async def sample_append_rows(): client = bigquery_storage_v1.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.AppendRowsRequest(write_stream="write_stream_value",) + request = bigquery_storage_v1.AppendRowsRequest( + write_stream="write_stream_value", + ) # This method expects an iterator which contains # 'bigquery_storage_v1.AppendRowsRequest' objects diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index 14a89c4b46a..6fe414b18ab 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -32,7 +32,9 @@ def sample_append_rows(): client = bigquery_storage_v1.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1.AppendRowsRequest(write_stream="write_stream_value",) + request = bigquery_storage_v1.AppendRowsRequest( + write_stream="write_stream_value", + ) # This method expects an iterator which contains # 'bigquery_storage_v1.AppendRowsRequest' objects diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index 23201452a15..2f108e8ccc2 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -32,7 +32,9 @@ async def sample_create_write_stream(): client = bigquery_storage_v1.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.CreateWriteStreamRequest(parent="parent_value",) + request = bigquery_storage_v1.CreateWriteStreamRequest( + parent="parent_value", + ) # Make the request response = await client.create_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index 5ebc4468ccf..c1c482048fd 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -32,7 +32,9 @@ def sample_create_write_stream(): client = bigquery_storage_v1.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1.CreateWriteStreamRequest(parent="parent_value",) + request = bigquery_storage_v1.CreateWriteStreamRequest( + parent="parent_value", + ) # Make the request response = 
client.create_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index 426cfc9226d..508f16b9e23 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -32,7 +32,9 @@ async def sample_finalize_write_stream(): client = bigquery_storage_v1.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.FinalizeWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1.FinalizeWriteStreamRequest( + name="name_value", + ) # Make the request response = await client.finalize_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index f86e15f72e7..14eb6e3fe6d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -32,7 +32,9 @@ def sample_finalize_write_stream(): client = bigquery_storage_v1.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1.FinalizeWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1.FinalizeWriteStreamRequest( + name="name_value", + ) # Make the request response = client.finalize_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index 849b6759e46..766b493f576 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -32,7 +32,9 @@ async def sample_flush_rows(): client = bigquery_storage_v1.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.FlushRowsRequest(write_stream="write_stream_value",) + request = bigquery_storage_v1.FlushRowsRequest( + write_stream="write_stream_value", + ) # Make the request response = await client.flush_rows(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index 6881767eb7b..e4dd727aa33 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -32,7 +32,9 @@ def sample_flush_rows(): client = bigquery_storage_v1.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1.FlushRowsRequest(write_stream="write_stream_value",) + request = bigquery_storage_v1.FlushRowsRequest( + write_stream="write_stream_value", + ) # Make the request response = client.flush_rows(request=request) diff --git 
a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index c00834717f6..22c5ac75e70 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -32,7 +32,9 @@ async def sample_get_write_stream(): client = bigquery_storage_v1.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1.GetWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1.GetWriteStreamRequest( + name="name_value", + ) # Make the request response = await client.get_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index e292241b763..4954a257317 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -32,7 +32,9 @@ def sample_get_write_stream(): client = bigquery_storage_v1.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1.GetWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1.GetWriteStreamRequest( + name="name_value", + ) # Make the request response = client.get_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index 0890e640e0f..3b2aa376520 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -32,7 +32,9 @@ async def sample_create_read_session(): client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateReadSessionRequest(parent="parent_value",) + request = bigquery_storage_v1beta2.CreateReadSessionRequest( + parent="parent_value", + ) # Make the request response = await client.create_read_session(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index 4c60e7cea4b..46e346c2593 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -32,7 +32,9 @@ def sample_create_read_session(): client = bigquery_storage_v1beta2.BigQueryReadClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateReadSessionRequest(parent="parent_value",) + request = bigquery_storage_v1beta2.CreateReadSessionRequest( + parent="parent_value", + ) # Make the request response = client.create_read_session(request=request) diff --git 
a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index 7e5d879d30b..2f8be0a4342 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -32,7 +32,9 @@ async def sample_read_rows(): client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.ReadRowsRequest(read_stream="read_stream_value",) + request = bigquery_storage_v1beta2.ReadRowsRequest( + read_stream="read_stream_value", + ) # Make the request stream = await client.read_rows(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index 4a968671cc7..770e00fe005 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -32,7 +32,9 @@ def sample_read_rows(): client = bigquery_storage_v1beta2.BigQueryReadClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.ReadRowsRequest(read_stream="read_stream_value",) + request = bigquery_storage_v1beta2.ReadRowsRequest( + read_stream="read_stream_value", + ) # Make the request stream = client.read_rows(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index 6b775ab2dfa..ab8cca952de 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -32,7 +32,9 @@ async def sample_split_read_stream(): client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.SplitReadStreamRequest(name="name_value",) + request = bigquery_storage_v1beta2.SplitReadStreamRequest( + name="name_value", + ) # Make the request response = await client.split_read_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index 6e7a4259d73..e82619c5706 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -32,7 +32,9 @@ def sample_split_read_stream(): client = bigquery_storage_v1beta2.BigQueryReadClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.SplitReadStreamRequest(name="name_value",) + request = bigquery_storage_v1beta2.SplitReadStreamRequest( + name="name_value", + ) # Make the request response = client.split_read_stream(request=request) diff --git 
a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index 7e0eb529a5d..c033128673f 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -32,7 +32,9 @@ async def sample_create_write_stream(): client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateWriteStreamRequest(parent="parent_value",) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest( + parent="parent_value", + ) # Make the request response = await client.create_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index a16af5af719..b3d72ded20d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -32,7 +32,9 @@ def sample_create_write_stream(): client = bigquery_storage_v1beta2.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateWriteStreamRequest(parent="parent_value",) + request = bigquery_storage_v1beta2.CreateWriteStreamRequest( + parent="parent_value", + ) # Make the request response = client.create_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index c208ac3c2e1..065f36088dd 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -32,7 +32,9 @@ async def sample_finalize_write_stream(): client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest( + name="name_value", + ) # Make the request response = await client.finalize_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index 56c7ef44c31..fa6f9c4f496 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -32,7 +32,9 @@ def sample_finalize_write_stream(): client = bigquery_storage_v1beta2.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(name="name_value",) + request = 
bigquery_storage_v1beta2.FinalizeWriteStreamRequest( + name="name_value", + ) # Make the request response = client.finalize_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index a84c8fd6f3d..c3e92057b19 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -32,7 +32,9 @@ async def sample_get_write_stream(): client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.GetWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1beta2.GetWriteStreamRequest( + name="name_value", + ) # Make the request response = await client.get_write_stream(request=request) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index c6d3050af4c..78bf2f7807e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -32,7 +32,9 @@ def sample_get_write_stream(): client = bigquery_storage_v1beta2.BigQueryWriteClient() # Initialize request argument(s) - request = bigquery_storage_v1beta2.GetWriteStreamRequest(name="name_value",) + request = bigquery_storage_v1beta2.GetWriteStreamRequest( + name="name_value", + ) # Make the request response = client.get_write_stream(request=request) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 85f5836dba3..25f87a215d4 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/bigquery_storage/snippets/append_rows_proto2_test.py b/bigquery_storage/snippets/append_rows_proto2_test.py index dddda30129e..d35e9a78912 100644 --- a/bigquery_storage/snippets/append_rows_proto2_test.py +++ b/bigquery_storage/snippets/append_rows_proto2_test.py @@ -116,7 +116,10 @@ def test_append_rows_proto2( ), ) in row_items assert (("int64_list", [1, 2, 3]), ("row_num", 13)) in row_items - assert (("row_num", 14), ("struct_col", {"sub_int_col": 7}),) in row_items + assert ( + ("row_num", 14), + ("struct_col", {"sub_int_col": 7}), + ) in row_items assert ( ("row_num", 15), ( diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 85f5836dba3..25f87a215d4 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py index ba524988a36..b1478016fc9 100644 --- a/bigquery_storage/snippets/sample_data_pb2.py +++ b/bigquery_storage/snippets/sample_data_pb2.py @@ -376,7 +376,9 @@ ), ], extensions=[], - nested_types=[_SAMPLEDATA_SAMPLESTRUCT,], + nested_types=[ + _SAMPLEDATA_SAMPLESTRUCT, + ], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 85f5836dba3..25f87a215d4 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage.py b/bigquery_storage/to_dataframe/read_table_bqstorage.py index 0a3ae777867..a0b1c49bc2e 100644 --- a/bigquery_storage/to_dataframe/read_table_bqstorage.py +++ b/bigquery_storage/to_dataframe/read_table_bqstorage.py @@ -49,7 +49,9 @@ def read_table(your_project_id): read_options=read_options, ) read_session = bqstorageclient.create_read_session( - parent=parent, read_session=requested_session, max_stream_count=1, + parent=parent, + read_session=requested_session, + max_stream_count=1, ) # This example reads from only a single stream. 
Read from multiple streams From d0f46ad8d50a8578a2097db90965e1e6e6870048 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 13:56:54 +0200 Subject: [PATCH 151/338] chore(deps): update all dependencies (#415) * chore(deps): update all dependencies * chore: revert changes for pinned dependencies Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 8 ++++---- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index c2845bffbe8..4f6bf643fc5 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.1 +pytest==7.1.1 diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index f770a1599e5..55ab6f0398c 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery-storage==2.13.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 048b49808a9..e227954a1e2 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.1 -pytest==7.0.1 +pytest==7.1.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 9ac3d905741..aa867ccf3ca 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery-storage==2.13.0 google-cloud-bigquery==2.34.2 protobuf==3.19.4 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index c2845bffbe8..4f6bf643fc5 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.1 +pytest==7.1.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6eaeef49f48..bece9d18aad 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,10 +1,10 @@ -google-auth==2.6.0 -google-cloud-bigquery-storage==2.12.0 +google-auth==2.6.2 +google-cloud-bigquery-storage==2.13.0 google-cloud-bigquery==2.34.2 pyarrow==7.0.0 ipython==7.31.1; python_version == '3.7' ipython==8.0.1; python_version == '3.8' -ipython==8.1.1; python_version >= '3.9' +ipython==8.2.0; python_version >= '3.9' pandas==1.3.5; python_version == '3.7' pandas==1.4.1; python_version > '3.7' -tqdm==4.63.0 +tqdm==4.63.1 From 3f97ea9bf7f994bc7a1d9aefe01bdcce6a75ca99 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 15:02:38 +0200 Subject: [PATCH 152/338] chore(deps): use === in requirements.txt for environment specific pins (#419) * chore(deps): update all dependencies * revert changes and ignore future changes for environment specific pins Co-authored-by: Anthonios Partheniou --- 
bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index bece9d18aad..32cb45e666f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -2,9 +2,9 @@ google-auth==2.6.2 google-cloud-bigquery-storage==2.13.0 google-cloud-bigquery==2.34.2 pyarrow==7.0.0 -ipython==7.31.1; python_version == '3.7' -ipython==8.0.1; python_version == '3.8' +ipython===7.31.1; python_version == '3.7' +ipython===8.0.1; python_version == '3.8' ipython==8.2.0; python_version >= '3.9' -pandas==1.3.5; python_version == '3.7' +pandas===1.3.5; python_version == '3.7' pandas==1.4.1; python_version > '3.7' tqdm==4.63.1 From d189a1845a68db9d705998fb465d1bde1f02906f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 23:48:46 +0200 Subject: [PATCH 153/338] chore(deps): update dependency google-cloud-bigquery to v2.34.3 (#422) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index aa867ccf3ca..3c070d95f9b 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.13.0 -google-cloud-bigquery==2.34.2 +google-cloud-bigquery==2.34.3 protobuf==3.19.4 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 32cb45e666f..5401f8c7b54 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.6.2 google-cloud-bigquery-storage==2.13.0 -google-cloud-bigquery==2.34.2 +google-cloud-bigquery==2.34.3 pyarrow==7.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' From e051aa0a910861fc05bb32850419ec4d3fb53d8d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 3 Apr 2022 22:08:42 +0200 Subject: [PATCH 154/338] chore(deps): update all dependencies (#428) * chore(deps): update all dependencies * samples(deps): add dependency on db-dtypes Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 3c070d95f9b..f8015663d6c 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.13.0 -google-cloud-bigquery==2.34.3 -protobuf==3.19.4 +google-cloud-bigquery==3.0.1 +protobuf==3.20.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5401f8c7b54..6d27e31c02e 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,10 +1,11 @@ google-auth==2.6.2 google-cloud-bigquery-storage==2.13.0 -google-cloud-bigquery==2.34.3 +google-cloud-bigquery==3.0.1 pyarrow==7.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.2.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' -pandas==1.4.1; python_version > '3.7' 
+pandas==1.4.2; python_version > '3.7' tqdm==4.63.1 +db-dtypes==1.0.0 From 92b42e54b8ad38c17aa5ac7fdd06a316363353a9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 4 Apr 2022 16:05:10 +0200 Subject: [PATCH 155/338] chore(deps): update dependency tqdm to v4.64.0 (#430) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6d27e31c02e..5b173259240 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -7,5 +7,5 @@ ipython===8.0.1; python_version == '3.8' ipython==8.2.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.2; python_version > '3.7' -tqdm==4.63.1 +tqdm==4.64.0 db-dtypes==1.0.0 From a3353b3c63eddfe54fa9837299aaba356c5f289d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 5 Apr 2022 22:26:42 +0200 Subject: [PATCH 156/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.13.1 (#432) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 55ab6f0398c..1ebe3ee421d 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.13.0 +google-cloud-bigquery-storage==2.13.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index f8015663d6c..77fef07e9b3 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.13.0 +google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 protobuf==3.20.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5b173259240..75eb5d8718e 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.6.2 -google-cloud-bigquery-storage==2.13.0 +google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 pyarrow==7.0.0 ipython===7.31.1; python_version == '3.7' From 5bf41279e5bf27a8eb26e4dcf1b8f43fdb3fe469 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 8 Apr 2022 01:12:41 +0200 Subject: [PATCH 157/338] chore(deps): update dependency google-auth to v2.6.3 (#435) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 75eb5d8718e..40a2aa502ed 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.6.2 +google-auth==2.6.3 google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 pyarrow==7.0.0 From a68de89b385fd6b73b9cf11d7fd540e3fddbcdad Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 Apr 2022 00:50:38 +0200 Subject: [PATCH 158/338] chore(deps): update dependency google-auth to v2.6.4 (#438) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 40a2aa502ed..087dd9284bb 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.6.3 +google-auth==2.6.4 google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 pyarrow==7.0.0 From b0d397024036339cb2db3c5fbd774fd23d3ee064 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Apr 2022 20:33:03 -0400 Subject: [PATCH 159/338] chore: use gapic-generator-python 0.65.1 (#439) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../snippet_metadata_bigquery storage_v1.json | 746 +++++++++++++++++- ...pet_metadata_bigquery storage_v1beta2.json | 746 +++++++++++++++++- 2 files changed, 1420 insertions(+), 72 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json index 7a4af13b766..fffc63cbc83 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-storage" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.create_read_session", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "CreateReadSession" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", + "shortName": "create_read_session" }, + "description": "Sample for CreateReadSession", "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", "segments": [ { @@ -43,18 +96,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } 
- ] + ], + "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.create_read_session", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "CreateReadSession" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", + "shortName": "create_read_session" }, + "description": "Sample for CreateReadSession", "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync", "segments": [ { @@ -87,19 +184,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.read_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "ReadRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", + "shortName": "read_rows" }, + "description": "Sample for ReadRows", "file": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", "segments": [ { @@ -132,18 +269,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.read_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "ReadRows" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", + "shortName": "read_rows" }, + "description": "Sample for ReadRows", "file": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync", "segments": [ { @@ -176,19 +353,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.split_read_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "SplitReadStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", + "shortName": "split_read_stream" }, + "description": "Sample for SplitReadStream", "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", "segments": [ { @@ -221,18 +430,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.split_read_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "SplitReadStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", + "shortName": "split_read_stream" }, + "description": "Sample for SplitReadStream", "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync", "segments": [ { @@ -265,19 +506,51 @@ "start": 42, "type": 
"RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.append_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "AppendRows" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", + "shortName": "append_rows" }, + "description": "Sample for AppendRows", "file": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", "segments": [ { @@ -310,18 +583,50 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.append_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "AppendRows" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", + "shortName": "append_rows" }, + "description": "Sample for AppendRows", "file": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync", "segments": [ { @@ -354,19 +659,55 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.batch_commit_write_streams", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "BatchCommitWriteStreams" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" }, + "description": "Sample for BatchCommitWriteStreams", "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", "segments": [ { @@ -399,18 +740,54 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.batch_commit_write_streams", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "BatchCommitWriteStreams" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" }, + "description": "Sample for BatchCommitWriteStreams", "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync", "segments": [ { @@ -443,19 +820,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.create_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "CreateWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "create_write_stream" }, + "description": "Sample for CreateWriteStream", "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", "segments": [ { @@ -488,18 +905,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.create_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "CreateWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "create_write_stream" }, + "description": "Sample for CreateWriteStream", "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync", "segments": [ { @@ -532,19 +989,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.finalize_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FinalizeWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" }, + "description": "Sample for FinalizeWriteStream", "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", "segments": [ { @@ -577,18 +1070,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.finalize_write_stream", 
"method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FinalizeWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" }, + "description": "Sample for FinalizeWriteStream", "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync", "segments": [ { @@ -621,19 +1150,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.flush_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FlushRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", + "shortName": "flush_rows" }, + "description": "Sample for FlushRows", "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", "segments": [ { @@ -666,18 +1231,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.flush_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FlushRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", + "shortName": "flush_rows" }, + "description": "Sample for 
FlushRows", "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync", "segments": [ { @@ -710,19 +1311,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.get_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "GetWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "get_write_stream" }, + "description": "Sample for GetWriteStream", "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", "segments": [ { @@ -755,18 +1392,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.get_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "GetWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "get_write_stream" }, + "description": "Sample for GetWriteStream", "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync", "segments": [ { @@ -799,7 +1472,8 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py" } ] } diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json index 09d8681da8b..fccea8301cc 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json +++ 
b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta2", + "version": "v1beta2" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-storage" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.create_read_session", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "CreateReadSession" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", + "shortName": "create_read_session" }, + "description": "Sample for CreateReadSession", "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async", "segments": [ { @@ -43,18 +96,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.create_read_session", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "CreateReadSession" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", + "shortName": "create_read_session" }, + "description": "Sample for CreateReadSession", "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync", "segments": [ { @@ -87,19 +184,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.read_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "ReadRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", + "shortName": "read_rows" }, + "description": "Sample for ReadRows", "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async", "segments": [ { @@ -132,18 +269,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.read_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "ReadRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", + "shortName": "read_rows" }, + "description": "Sample for ReadRows", "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync", "segments": [ { @@ -176,19 +353,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.split_read_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "SplitReadStream" - } + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", + "shortName": "split_read_stream" }, + "description": "Sample for SplitReadStream", "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async", "segments": [ { @@ -221,18 +430,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.split_read_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", "shortName": "BigQueryRead" }, "shortName": "SplitReadStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", + "shortName": "split_read_stream" }, + "description": "Sample for SplitReadStream", "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync", "segments": [ { @@ -265,19 +506,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.append_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "AppendRows" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", + "shortName": "append_rows" }, + "description": "Sample for AppendRows", "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async", "segments": [ { @@ -310,18 +583,50 @@ "start": 52, 
"type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.append_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "AppendRows" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", + "shortName": "append_rows" }, + "description": "Sample for AppendRows", "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync", "segments": [ { @@ -354,19 +659,55 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.batch_commit_write_streams", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "BatchCommitWriteStreams" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" }, + "description": "Sample for BatchCommitWriteStreams", "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async", "segments": [ { @@ -399,18 +740,54 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.batch_commit_write_streams", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": 
"BigQueryWrite" }, "shortName": "BatchCommitWriteStreams" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" }, + "description": "Sample for BatchCommitWriteStreams", "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync", "segments": [ { @@ -443,19 +820,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.create_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "CreateWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "create_write_stream" }, + "description": "Sample for CreateWriteStream", "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async", "segments": [ { @@ -488,18 +905,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.create_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "CreateWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "create_write_stream" }, + "description": "Sample for CreateWriteStream", "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync", "segments": [ { @@ -532,19 +989,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.finalize_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FinalizeWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" }, + "description": "Sample for FinalizeWriteStream", "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async", "segments": [ { @@ -577,18 +1070,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.finalize_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FinalizeWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" }, + "description": "Sample for FinalizeWriteStream", "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync", "segments": [ { @@ -621,19 
+1150,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.flush_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FlushRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", + "shortName": "flush_rows" }, + "description": "Sample for FlushRows", "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async", "segments": [ { @@ -666,18 +1231,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.flush_rows", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "FlushRows" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", + "shortName": "flush_rows" }, + "description": "Sample for FlushRows", "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync", "segments": [ { @@ -710,19 +1311,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.get_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "GetWriteStream" - } + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "get_write_stream" }, + "description": "Sample for GetWriteStream", "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async", "segments": [ { @@ -755,18 +1392,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.get_write_stream", "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", "shortName": "BigQueryWrite" }, "shortName": "GetWriteStream" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "get_write_stream" }, + "description": "Sample for GetWriteStream", "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync", "segments": [ { @@ -799,7 +1472,8 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py" } ] } From ec1115ece29eb767e95cd49c9f0409a785d69755 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 15 Apr 2022 02:44:08 +0200 Subject: [PATCH 160/338] chore(deps): update dependency google-auth to v2.6.5 (#440) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 087dd9284bb..8fce1126881 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.6.4 +google-auth==2.6.5 google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 pyarrow==7.0.0 From 618a38da9f70793b95b462596da949d59d726bae Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:15:47 -0400 Subject: [PATCH 161/338] chore(python): add nox session to sort python imports (#441) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 22 ++++++++++++++++++++++ bigquery_storage/snippets/noxfile.py | 22 ++++++++++++++++++++++ bigquery_storage/to_dataframe/noxfile.py | 22 ++++++++++++++++++++++ 3 files changed, 66 insertions(+) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 25f87a215d4..a40410b5636 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +169,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 25f87a215d4..a40410b5636 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +169,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 25f87a215d4..a40410b5636 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. 
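The `format` session added to each noxfile above simply chains isort and black over the sample directory's Python files. As a rough standalone equivalent, assuming `black==22.3.0` and `isort==5.10.1` are installed in the active environment, the same pipeline can be run outside nox:

# Standalone sketch of what the new nox "format" session does.
# Assumes isort and black are installed:
#   pip install black==22.3.0 isort==5.10.1
import os
import subprocess

python_files = [path for path in os.listdir(".") if path.endswith(".py")]

if python_files:
    # --fss (force-sort-within-sections) sorts imports in strict
    # alphabetical order, mirroring the option used in the noxfiles.
    subprocess.run(["isort", "--fss", *python_files], check=True)
    subprocess.run(["black", *python_files], check=True)

Within a sample directory the session itself is invoked as `nox -s format`, alongside the existing `blacken` session.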
@@ -168,12 +169,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # From 11bb5ed98c2b97084983ee6f392321c27d4dd618 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 22 Apr 2022 10:29:10 +0200 Subject: [PATCH 162/338] chore(deps): update all dependencies (#444) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 77fef07e9b3..ab38f5c6cd5 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 -protobuf==3.20.0 +protobuf==3.20.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 8fce1126881..74cce60c662 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.6.5 +google-auth==2.6.6 google-cloud-bigquery-storage==2.13.1 google-cloud-bigquery==3.0.1 pyarrow==7.0.0 From 646c5433ff7ebfe0063332cdef5c965e87c49116 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 25 Apr 2022 17:01:55 +0200 Subject: [PATCH 163/338] chore(deps): update dependency pytest to v7.1.2 (#445) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 4f6bf643fc5..d00689e0623 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.1 +pytest==7.1.2 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index e227954a1e2..0b8f81a49d1 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.1 -pytest==7.1.1 +pytest==7.1.2 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 4f6bf643fc5..d00689e0623 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.1 +pytest==7.1.2 From c215af17d45fd23394974cffe6a43568651bf1c1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 29 Apr 2022 
19:02:20 +0200 Subject: [PATCH 164/338] chore(deps): update dependency ipython to v8.3.0 (#447) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 74cce60c662..878e436ba13 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -4,7 +4,7 @@ google-cloud-bigquery==3.0.1 pyarrow==7.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' -ipython==8.2.0; python_version >= '3.9' +ipython==8.3.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.2; python_version > '3.7' tqdm==4.64.0 From 53bac972f0cca6af467bbcde28a2df6d9bbd85fd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 9 May 2022 22:14:08 +0200 Subject: [PATCH 165/338] chore: update all dependencies (#451) * chore(deps): update all dependencies * samples(deps): add dependency on db-dtypes * chore(deps): update dependency pyarrow to v8 * chore: update dependency db-dtypes to 1.0.1 * fix typo Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index ab38f5c6cd5..12079cf70da 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.13.1 -google-cloud-bigquery==3.0.1 +google-cloud-bigquery==3.1.0 protobuf==3.20.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 878e436ba13..3b4800a8487 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,11 +1,11 @@ google-auth==2.6.6 google-cloud-bigquery-storage==2.13.1 -google-cloud-bigquery==3.0.1 -pyarrow==7.0.0 +google-cloud-bigquery==3.1.0 +pyarrow==8.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.3.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.2; python_version > '3.7' tqdm==4.64.0 -db-dtypes==1.0.0 +db-dtypes==1.0.1 From 73f27652f568348f2297094a50e30ae3ca4367df Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 7 Jun 2022 16:33:07 +0200 Subject: [PATCH 166/338] chore(deps): update all dependencies (#455) * chore(deps): update all dependencies * revert protobuf Co-authored-by: Anthonios Partheniou --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 3b4800a8487..99a91731c4a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -4,7 +4,7 @@ google-cloud-bigquery==3.1.0 pyarrow==8.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' -ipython==8.3.0; python_version >= '3.9' +ipython==8.4.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.2; python_version > '3.7' tqdm==4.64.0 From 2fa5736f368a26ec29a35bf2aa79483eda68fa66 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Jul 2022 12:43:10 -0400 
Subject: [PATCH 167/338] fix: require python 3.7+ (#468) * chore(python): drop python 3.6 Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * require python 3.7+ in setup.py * remove python 3.6 sample configs Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/noxfile.py | 2 +- bigquery_storage/snippets/noxfile.py | 2 +- bigquery_storage/to_dataframe/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index a40410b5636..29b5bc85218 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index a40410b5636..29b5bc85218 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index a40410b5636..29b5bc85218 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. 
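# (Illustrative aside, not part of this diff: the samples template derives the
# tested matrix by filtering ALL_VERSIONS against the ignore list, roughly
#
#     tested = [v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]
#
# so with 3.6 removed above, a noxfile_config.py that ignores "3.7" leaves
# ["3.8", "3.9", "3.10"]. The variable name "tested" is assumed here.)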
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 30eaa10db26e2cb49b1566c0ee5ec88507598ca0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Jul 2022 10:31:57 -0700 Subject: [PATCH 168/338] fix(deps): require google-api-core >= 2.8.0 (#465) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 1.1.1 PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 fix: Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time PiperOrigin-RevId: 457061436 Source-Link: https://github.com/googleapis/googleapis/commit/8ff130bc81fa1d175e410d14a300caa18d5ebf80 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2eb0faca717d9cf44b838b7db5e862451b8a86ef Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmViMGZhY2E3MTdkOWNmNDRiODM4YjdkYjVlODYyNDUxYjhhODZlZiJ9 feat: add audience parameter PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 feat: add fields to eventually contain row level errors Committer: @gnanda PiperOrigin-RevId: 456324780 Source-Link: https://github.com/googleapis/googleapis/commit/f24b37a351260ddce8208edae50d637fa0b88d6b Source-Link: https://github.com/googleapis/googleapis-gen/commit/33f9d814082117116c4b68a6f5aac3f42bec35c2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzNmOWQ4MTQwODIxMTcxMTZjNGI2OGE2ZjVhYWMzZjQyYmVjMzVjMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): require google-api-core >= 2.8.0 * regenerate pb2 files using latest grpcio tools * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../snippets/customer_record_pb2.py | 89 +--- bigquery_storage/snippets/sample_data_pb2.py | 391 +----------------- 2 files changed, 25 insertions(+), 455 deletions(-) diff --git a/bigquery_storage/snippets/customer_record_pb2.py b/bigquery_storage/snippets/customer_record_pb2.py index 14201ea912f..d797784b0bf 100644 --- a/bigquery_storage/snippets/customer_record_pb2.py +++ b/bigquery_storage/snippets/customer_record_pb2.py @@ -1,10 +1,9 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
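# (Hedged aside, outside the generated file: this regeneration replaces the
# hand-built _descriptor.Descriptor(...) blocks with the compact
# descriptor_pool.Default().AddSerializedFile(...) form seen below. Either
# way, the resulting message class is used identically, e.g.
#
#     record = CustomerRecord(customer_name="Alice", row_num=1)
#     payload = record.SerializeToString()
#     assert CustomerRecord.FromString(payload).row_num == 1
#
# "Alice" is a made-up value; the field names come from customer_record.proto.)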
# source: customer_record.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database @@ -14,86 +13,26 @@ _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name="customer_record.proto", - package="", - syntax="proto2", - serialized_options=None, - serialized_pb=_b( - '\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03' - ), -) - - -_CUSTOMERRECORD = _descriptor.Descriptor( - name="CustomerRecord", - full_name="CustomerRecord", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="customer_name", - full_name="CustomerRecord.customer_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_num", - full_name="CustomerRecord.row_num", - index=1, - number=2, - type=3, - cpp_type=2, - label=2, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=25, - serialized_end=81, +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03' ) -DESCRIPTOR.message_types_by_name["CustomerRecord"] = _CUSTOMERRECORD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) +_CUSTOMERRECORD = DESCRIPTOR.message_types_by_name["CustomerRecord"] CustomerRecord = _reflection.GeneratedProtocolMessageType( "CustomerRecord", (_message.Message,), - dict( - DESCRIPTOR=_CUSTOMERRECORD, - __module__="customer_record_pb2" + { + "DESCRIPTOR": _CUSTOMERRECORD, + "__module__": "customer_record_pb2" # @@protoc_insertion_point(class_scope:CustomerRecord) - ), + }, ) _sym_db.RegisterMessage(CustomerRecord) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _CUSTOMERRECORD._serialized_start = 25 + _CUSTOMERRECORD._serialized_end = 81 # @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py index b1478016fc9..85106843c8c 100644 --- a/bigquery_storage/snippets/sample_data_pb2.py +++ b/bigquery_storage/snippets/sample_data_pb2.py @@ -3,6 +3,7 @@ # source: sample_data.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database @@ -12,389 +13,13 @@ _sym_db = _symbol_database.Default() -DESCRIPTOR = 
_descriptor.FileDescriptor( - name="sample_data.proto", - package="", - syntax="proto2", - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x11sample_data.proto"\xa9\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12\x0f\n\x07row_num\x18\x10 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03', +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x11sample_data.proto"\xa9\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12\x0f\n\x07row_num\x18\x10 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03' ) -_SAMPLEDATA_SAMPLESTRUCT = _descriptor.Descriptor( - name="SampleStruct", - full_name="SampleData.SampleStruct", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sub_int_col", - full_name="SampleData.SampleStruct.sub_int_col", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=412, - serialized_end=447, -) - -_SAMPLEDATA = _descriptor.Descriptor( - name="SampleData", - full_name="SampleData", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="bool_col", - full_name="SampleData.bool_col", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bytes_col", - full_name="SampleData.bytes_col", - index=1, - number=2, - type=12, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="float64_col", - full_name="SampleData.float64_col", - index=2, - number=3, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="int64_col", - full_name="SampleData.int64_col", - index=3, - number=4, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="string_col", - full_name="SampleData.string_col", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="date_col", - full_name="SampleData.date_col", - index=5, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="datetime_col", - full_name="SampleData.datetime_col", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="geography_col", - full_name="SampleData.geography_col", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="numeric_col", - full_name="SampleData.numeric_col", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bignumeric_col", - full_name="SampleData.bignumeric_col", - index=9, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_col", - full_name="SampleData.time_col", - index=10, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="timestamp_col", - full_name="SampleData.timestamp_col", - index=11, - number=12, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="int64_list", - full_name="SampleData.int64_list", - index=12, - number=13, - type=3, - cpp_type=2, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="struct_col", - full_name="SampleData.struct_col", - index=13, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="struct_list", - full_name="SampleData.struct_list", - index=14, - number=15, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="row_num", - full_name="SampleData.row_num", - index=15, - number=16, - type=3, - cpp_type=2, - label=2, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[ - _SAMPLEDATA_SAMPLESTRUCT, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=22, - serialized_end=447, -) - -_SAMPLEDATA_SAMPLESTRUCT.containing_type = _SAMPLEDATA -_SAMPLEDATA.fields_by_name["struct_col"].message_type = _SAMPLEDATA_SAMPLESTRUCT -_SAMPLEDATA.fields_by_name["struct_list"].message_type = _SAMPLEDATA_SAMPLESTRUCT -DESCRIPTOR.message_types_by_name["SampleData"] = _SAMPLEDATA -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - +_SAMPLEDATA = DESCRIPTOR.message_types_by_name["SampleData"] +_SAMPLEDATA_SAMPLESTRUCT = _SAMPLEDATA.nested_types_by_name["SampleStruct"] SampleData = _reflection.GeneratedProtocolMessageType( "SampleData", (_message.Message,), @@ -416,5 +41,11 @@ _sym_db.RegisterMessage(SampleData) _sym_db.RegisterMessage(SampleData.SampleStruct) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _SAMPLEDATA._serialized_start = 22 + _SAMPLEDATA._serialized_end = 447 + 
_SAMPLEDATA_SAMPLESTRUCT._serialized_start = 412 + _SAMPLEDATA_SAMPLESTRUCT._serialized_end = 447 # @@protoc_insertion_point(module_scope) From 4ae714a7029f74d365a8b3c77a5b88d2f8ca8623 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 17:05:14 +0200 Subject: [PATCH 169/338] chore(deps): update all dependencies (#476) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert * remove protobuf Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 5 ++--- bigquery_storage/to_dataframe/requirements.txt | 10 +++++----- 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 1ebe3ee421d..691d3082169 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.13.1 +google-cloud-bigquery-storage==2.14.1 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 0b8f81a49d1..05fefa400b3 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.3.1 +google-cloud-testutils==1.3.3 pytest==7.1.2 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 12079cf70da..64e0aa5a4a4 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,2 @@ -google-cloud-bigquery-storage==2.13.1 -google-cloud-bigquery==3.1.0 -protobuf==3.20.1 +google-cloud-bigquery-storage==2.14.1 +google-cloud-bigquery==3.3.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 99a91731c4a..a15050cdedc 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,11 +1,11 @@ -google-auth==2.6.6 -google-cloud-bigquery-storage==2.13.1 -google-cloud-bigquery==3.1.0 +google-auth==2.9.1 +google-cloud-bigquery-storage==2.14.1 +google-cloud-bigquery==3.3.0 pyarrow==8.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.4.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' -pandas==1.4.2; python_version > '3.7' +pandas==1.4.3; python_version > '3.7' tqdm==4.64.0 -db-dtypes==1.0.1 +db-dtypes==1.0.2 From f2dc7708729a1beb96536644ed7563527a582b93 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Aug 2022 12:33:39 +0200 Subject: [PATCH 170/338] chore(deps): update all dependencies (#478) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert * chore: update dependency google-cloud-bigquery and db-dtypes Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 2 files changed, 
4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 64e0aa5a4a4..d2f646b08a1 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.14.1 -google-cloud-bigquery==3.3.0 +google-cloud-bigquery==3.3.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index a15050cdedc..6bb9cbfb1a7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,11 +1,11 @@ google-auth==2.9.1 google-cloud-bigquery-storage==2.14.1 -google-cloud-bigquery==3.3.0 -pyarrow==8.0.0 +google-cloud-bigquery==3.3.1 +pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.4.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.3; python_version > '3.7' tqdm==4.64.0 -db-dtypes==1.0.2 +db-dtypes==1.0.3 From 5d14bbeefe5080f556cb62c1b1fd74e7e4bb3d8d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Aug 2022 12:51:45 +0200 Subject: [PATCH 171/338] chore(deps): update all dependencies (#483) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * revert * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Anthonios Partheniou Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6bb9cbfb1a7..edea2f036c8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.9.1 +google-auth==2.10.0 google-cloud-bigquery-storage==2.14.1 google-cloud-bigquery==3.3.1 pyarrow==9.0.0 From 5327c15cd0deaa89a2417b800fcb2747d7c687a5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Aug 2022 16:25:29 +0200 Subject: [PATCH 172/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.14.2 (#488) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 691d3082169..129e07d48c6 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.14.1 +google-cloud-bigquery-storage==2.14.2 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index d2f646b08a1..ec58e5df7bd 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.14.1 +google-cloud-bigquery-storage==2.14.2 google-cloud-bigquery==3.3.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index edea2f036c8..5d1d563b6b9 100644 --- 
a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.10.0 -google-cloud-bigquery-storage==2.14.1 +google-cloud-bigquery-storage==2.14.2 google-cloud-bigquery==3.3.1 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' From 386c665e1345c34433d57ed5c0f140b743839ea9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 23 Aug 2022 16:32:28 +0200 Subject: [PATCH 173/338] chore(deps): update dependency google-cloud-bigquery to v3.3.2 (#489) Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index ec58e5df7bd..815c795cebc 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.14.2 -google-cloud-bigquery==3.3.1 +google-cloud-bigquery==3.3.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5d1d563b6b9..9c4a7dcff82 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.10.0 google-cloud-bigquery-storage==2.14.2 -google-cloud-bigquery==3.3.1 +google-cloud-bigquery==3.3.2 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' From e0a95d2930a11768eb81045950538babf2722350 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 23 Aug 2022 17:44:28 +0200 Subject: [PATCH 174/338] chore(deps): update dependency google-auth to v2.11.0 (#492) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 9c4a7dcff82..c165cf9f27a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.10.0 +google-auth==2.11.0 google-cloud-bigquery-storage==2.14.2 google-cloud-bigquery==3.3.2 pyarrow==9.0.0 From 5631376c5743d4f7b7b4f491d788db8ffb825f5f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 31 Aug 2022 18:03:22 +0200 Subject: [PATCH 175/338] chore(deps): update dependency pandas to v1.4.4 (#498) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c165cf9f27a..9aa0b564a11 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -6,6 +6,6 @@ ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.4.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' -pandas==1.4.3; python_version > '3.7' +pandas==1.4.4; python_version > '3.7' tqdm==4.64.0 db-dtypes==1.0.3 From b2847687013b846bf5e22d60cd0aefa5add59b43 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 17:41:45 +0200 Subject: [PATCH 176/338] chore(deps): update all dependencies (#504) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 
bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index d00689e0623..e07168502ea 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 +pytest==7.1.3 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 05fefa400b3..8394ed52816 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.1.2 +pytest==7.1.3 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index d00689e0623..e07168502ea 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 +pytest==7.1.3 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 9aa0b564a11..ddaf39e3c44 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -4,8 +4,8 @@ google-cloud-bigquery==3.3.2 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' -ipython==8.4.0; python_version >= '3.9' +ipython==8.5.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.4; python_version > '3.7' -tqdm==4.64.0 +tqdm==4.64.1 db-dtypes==1.0.3 From 21e7cb51629c2a44a6a04a72cac9ec2296edb8ff Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Sep 2022 14:16:27 +0000 Subject: [PATCH 177/338] chore: Bump gapic-generator-python version to 1.3.0 (#506) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 472561635 Source-Link: https://github.com/googleapis/googleapis/commit/332ecf599f8e747d8d1213b77ae7db26eff12814 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4313d682880fd9d7247291164d4e9d3d5bd9f177 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDMxM2Q2ODI4ODBmZDlkNzI0NzI5MTE2NGQ0ZTlkM2Q1YmQ5ZjE3NyJ9 --- ...ig_query_read_create_read_session_async.py | 7 + ...big_query_read_create_read_session_sync.py | 7 + ...enerated_big_query_read_read_rows_async.py | 7 + ...generated_big_query_read_read_rows_sync.py | 7 + ..._big_query_read_split_read_stream_async.py | 7 + ...d_big_query_read_split_read_stream_sync.py | 7 + ...rated_big_query_write_append_rows_async.py | 7 + ...erated_big_query_write_append_rows_sync.py | 7 + ..._write_batch_commit_write_streams_async.py | 9 +- ...y_write_batch_commit_write_streams_sync.py | 9 +- ...g_query_write_create_write_stream_async.py | 7 + ...ig_query_write_create_write_stream_sync.py | 7 + ...query_write_finalize_write_stream_async.py | 7 + ..._query_write_finalize_write_stream_sync.py | 7 + ...erated_big_query_write_flush_rows_async.py | 7 + ...nerated_big_query_write_flush_rows_sync.py | 7 + ..._big_query_write_get_write_stream_async.py | 7 + ...d_big_query_write_get_write_stream_sync.py | 7 + ...ig_query_read_create_read_session_async.py | 7 + ...big_query_read_create_read_session_sync.py | 7 + ...enerated_big_query_read_read_rows_async.py | 7 + ...generated_big_query_read_read_rows_sync.py | 7 + ..._big_query_read_split_read_stream_async.py | 7 + ...d_big_query_read_split_read_stream_sync.py | 7 + ...rated_big_query_write_append_rows_async.py | 7 + ...erated_big_query_write_append_rows_sync.py | 7 + ..._write_batch_commit_write_streams_async.py | 9 +- ...y_write_batch_commit_write_streams_sync.py | 9 +- ...g_query_write_create_write_stream_async.py | 7 + ...ig_query_write_create_write_stream_sync.py | 7 + ...query_write_finalize_write_stream_async.py | 7 + ..._query_write_finalize_write_stream_sync.py | 7 + ...erated_big_query_write_flush_rows_async.py | 7 + ...nerated_big_query_write_flush_rows_sync.py | 7 + ..._big_query_write_get_write_stream_async.py | 7 + ...d_big_query_write_get_write_stream_sync.py | 7 + .../snippet_metadata_bigquery storage_v1.json | 360 +++++++++--------- ...pet_metadata_bigquery storage_v1beta2.json | 360 +++++++++--------- 38 files changed, 616 insertions(+), 364 deletions(-) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index a8e43397c7e..ccf607d2715 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
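# (Hedged illustration of the client_options mechanism the next lines refer
# to; ClientOptions(api_endpoint=...) is the real google-api-core hook, but
# the endpoint host below is a placeholder, not a documented regional endpoint:
#
#     from google.api_core.client_options import ClientOptions
#     client = bigquery_storage_v1.BigQueryReadAsyncClient(
#         client_options=ClientOptions(api_endpoint="eu-bigquerystorage.googleapis.com")
#     )
# )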
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index f38b679c375..86bad351b02 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index 5ca0fd07a94..72026516e33 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index 1b8dca04047..68fa31134f3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index f58e887d201..2c0b697d375 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index 6a44fc92afd..9692ba37769 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 0e11ca4b473..34fb01321fd 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index 6fe414b18ab..fba91faab1a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py index d165b2682ef..da8d31415e7 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 @@ -34,7 +41,7 @@ async def sample_batch_commit_write_streams(): # Initialize request argument(s) request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( parent="parent_value", - write_streams=["write_streams_value_1", "write_streams_value_2"], + write_streams=["write_streams_value1", "write_streams_value2"], ) # Make the request diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py index ba80de9c918..162647eae22 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 @@ -34,7 +41,7 @@ def sample_batch_commit_write_streams(): # Initialize request argument(s) request = bigquery_storage_v1.BatchCommitWriteStreamsRequest( parent="parent_value", - write_streams=["write_streams_value_1", "write_streams_value_2"], + write_streams=["write_streams_value1", "write_streams_value2"], ) # Make the request diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index 2f108e8ccc2..0d763268da7 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index c1c482048fd..d1c6f5b9ec7 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index 508f16b9e23..d1923783ed5 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index 14eb6e3fe6d..99e89a5e72a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index 766b493f576..632abddb81d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index e4dd727aa33..490ec1fe84e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index 22c5ac75e70..00c8604acb9 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index 4954a257317..54260dd65af 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index 3b2aa376520..16e34a9d91c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
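+#   (Illustration, not generated output: the placeholder
+#   parent="parent_value" below stands in for a project resource name of
+#   the form "projects/{project_id}", the project in which the read
+#   session is created.)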
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index 46e346c2593..184fcdf0266 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index 2f8be0a4342..91659f55331 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index 770e00fe005..993d5da7a8d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index ab8cca952de..6c2eec31229 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index e82619c5706..637b4c7f712 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py index 86d1d70dac3..843fbeda666 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
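+#   (Note added for clarity: AppendRows is a bidirectional streaming
+#   method, so this template sends an iterator of requests. Beyond the
+#   placeholder write_stream value, each request must also carry real
+#   serialized row data before the call can succeed.)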
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py index 706f9d41a89..51118d10dae 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py index a534cd92ef2..11136e94a70 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 @@ -34,7 +41,7 @@ async def sample_batch_commit_write_streams(): # Initialize request argument(s) request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( parent="parent_value", - write_streams=["write_streams_value_1", "write_streams_value_2"], + write_streams=["write_streams_value1", "write_streams_value2"], ) # Make the request diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py index f31e195d15d..3ff602c8981 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 @@ -34,7 +41,7 @@ def sample_batch_commit_write_streams(): # Initialize request argument(s) request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( parent="parent_value", - write_streams=["write_streams_value_1", "write_streams_value_2"], + write_streams=["write_streams_value1", "write_streams_value2"], ) # Make the request diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index c033128673f..ed1fc872fb9 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index b3d72ded20d..6118fca1eec 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index 065f36088dd..746548c5170 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index fa6f9c4f496..813798efefa 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py index 221be051cfb..7a790e4cf0d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py index 2a018b2bbe0..e84ebb2fc67 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index c3e92057b19..0c51aea4cd0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index 78bf2f7807e..03e651c7607 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -24,6 +24,13 @@ # [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_storage_v1beta2 diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json index fffc63cbc83..28ddb5745eb 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json @@ -67,33 +67,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -155,33 +155,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -240,33 +240,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -324,33 +324,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -401,33 +401,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + 
"start": 49, "type": "RESPONSE_HANDLING" } ], @@ -477,33 +477,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -554,33 +554,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -630,33 +630,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -711,33 +711,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -791,33 +791,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -876,33 +876,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": 
"REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -960,33 +960,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1041,33 +1041,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1121,33 +1121,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1202,33 +1202,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1282,33 +1282,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1363,33 +1363,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" 
}, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1443,33 +1443,33 @@ "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json index fccea8301cc..e58914b745a 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json @@ -67,33 +67,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -155,33 +155,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -240,33 +240,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -324,33 +324,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + 
"start": 49, "type": "RESPONSE_HANDLING" } ], @@ -401,33 +401,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -477,33 +477,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -554,33 +554,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -630,33 +630,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -711,33 +711,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -791,33 +791,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 
49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -876,33 +876,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -960,33 +960,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1041,33 +1041,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1121,33 +1121,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1202,33 +1202,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1282,33 +1282,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + 
"end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1363,33 +1363,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1443,33 +1443,33 @@ "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], From 6763361248efc0384bc2f8a4565cbb1ce58bfbd1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 7 Sep 2022 17:30:11 +0200 Subject: [PATCH 178/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.15.0 (#505) Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 129e07d48c6..f04cb54196a 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.14.2 +google-cloud-bigquery-storage==2.15.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 815c795cebc..44c435e0ae1 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.14.2 +google-cloud-bigquery-storage==2.15.0 google-cloud-bigquery==3.3.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ddaf39e3c44..d3bd75bb490 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.11.0 -google-cloud-bigquery-storage==2.14.2 +google-cloud-bigquery-storage==2.15.0 google-cloud-bigquery==3.3.2 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' From 14048758f758222b0a260783b14c64f8f3220503 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 12:21:11 -0400 Subject: [PATCH 179/338] chore: detect samples tests in nested directories (#510) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 6 ++++-- bigquery_storage/snippets/noxfile.py | 6 ++++-- bigquery_storage/to_dataframe/noxfile.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 29b5bc85218..b053ca568f6 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -208,8 +208,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 29b5bc85218..b053ca568f6 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -208,8 +208,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 29b5bc85218..b053ca568f6 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -208,8 +208,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From 30f09e9cee3b1a480d87a9e8fce854857b752da4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 19 Sep 2022 21:23:47 +0200 Subject: [PATCH 180/338] chore(deps): update dependency pandas to v1.5.0 (#511) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d3bd75bb490..1180fd21bc1 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -6,6 +6,6 @@ ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.5.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' -pandas==1.4.4; python_version > '3.7' +pandas==1.5.0; python_version > '3.7' tqdm==4.64.1 db-dtypes==1.0.3 From f3578dabd0395e1f6e7e65920e8f83b2e470bdad Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Sep 2022 03:43:14 +0200 Subject: [PATCH 181/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.16.0 (#512) 
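
A quick local check of the new pin (illustrative commands, not part of
this change):

    pip install --upgrade google-cloud-bigquery-storage==2.16.0
    pip show google-cloud-bigquery-storage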
--- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index f04cb54196a..609ec5cd729 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.15.0 +google-cloud-bigquery-storage==2.16.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 44c435e0ae1..23db8c48688 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.15.0 +google-cloud-bigquery-storage==2.16.0 google-cloud-bigquery==3.3.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 1180fd21bc1..45756727f42 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.11.0 -google-cloud-bigquery-storage==2.15.0 +google-cloud-bigquery-storage==2.16.0 google-cloud-bigquery==3.3.2 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' From cd0394602fd3b84d6a5c93e2cc6488dbcf4a06ae Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Sep 2022 13:19:14 +0200 Subject: [PATCH 182/338] chore(deps): update dependency google-auth to v2.11.1 (#513) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 45756727f42..ba9dc80dd57 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.11.0 +google-auth==2.11.1 google-cloud-bigquery-storage==2.16.0 google-cloud-bigquery==3.3.2 pyarrow==9.0.0 From 3e18cb50ef8e06ea0e30ebb15e9a0aba272f12ee Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 21 Sep 2022 16:51:10 +0200 Subject: [PATCH 183/338] chore(deps): update dependency db-dtypes to v1.0.4 (#514) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ba9dc80dd57..2d29fb23ffc 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,4 +8,4 @@ ipython==8.5.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.5.0; python_version > '3.7' tqdm==4.64.1 -db-dtypes==1.0.3 +db-dtypes==1.0.4 From 8ade5e2ca9e285c3ee0dd00652859972463add71 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 3 Oct 2022 19:07:27 +0200 Subject: [PATCH 184/338] chore(deps): update dependency google-auth to v2.12.0 (#518) Co-authored-by: Anthonios Partheniou --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2d29fb23ffc..3fc8d325db3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.11.1 +google-auth==2.12.0 
google-cloud-bigquery-storage==2.16.0 google-cloud-bigquery==3.3.2 pyarrow==9.0.0 From 1f1a8acd3baaa6cf4e954b8e422e9967ddee2d1c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 01:43:02 +0200 Subject: [PATCH 185/338] chore(deps): update all dependencies (#521) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 609ec5cd729..05fdf5ee2bc 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.16.0 +google-cloud-bigquery-storage==2.16.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 23db8c48688..a8f663f7863 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.16.0 -google-cloud-bigquery==3.3.2 +google-cloud-bigquery-storage==2.16.1 +google-cloud-bigquery==3.3.3 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 3fc8d325db3..3f215e85fa5 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.12.0 -google-cloud-bigquery-storage==2.16.0 -google-cloud-bigquery==3.3.2 +google-cloud-bigquery-storage==2.16.1 +google-cloud-bigquery==3.3.3 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' From 20b9c0e9f76e475ea4b9c37d3ba00b0e15c00428 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 10 Oct 2022 20:15:23 +0200 Subject: [PATCH 186/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.16.2 (#524) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 05fdf5ee2bc..022ce341ae5 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.16.1 +google-cloud-bigquery-storage==2.16.2 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index a8f663f7863..fb55f567e9c 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.16.1 +google-cloud-bigquery-storage==2.16.2 google-cloud-bigquery==3.3.3 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 3f215e85fa5..05418f67aca 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.12.0 -google-cloud-bigquery-storage==2.16.1 +google-cloud-bigquery-storage==2.16.2 google-cloud-bigquery==3.3.3 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' From be5054798d5158883ee56374e6b3fe0b29f2dcdc Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 Oct 2022 15:14:53 +0200 Subject: [PATCH 187/338] chore(deps): update 
all dependencies (#525) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index fb55f567e9c..2624ee48aef 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.16.2 -google-cloud-bigquery==3.3.3 +google-cloud-bigquery==3.3.5 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 05418f67aca..deb9656a949 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ -google-auth==2.12.0 +google-auth==2.13.0 google-cloud-bigquery-storage==2.16.2 -google-cloud-bigquery==3.3.3 +google-cloud-bigquery==3.3.5 pyarrow==9.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' From 6c8726b77d16eec7430ed07e335bd1387948a591 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 Oct 2022 15:43:46 +0200 Subject: [PATCH 188/338] chore(deps): update dependency pandas to v1.5.1 (#526) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index deb9656a949..fc134f655e2 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -6,6 +6,6 @@ ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.5.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' -pandas==1.5.0; python_version > '3.7' +pandas==1.5.1; python_version > '3.7' tqdm==4.64.1 db-dtypes==1.0.4 From 6b2ef9e6d18b325b083d2c22a7a4fcdb300eeda1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 12:55:20 +0200 Subject: [PATCH 189/338] chore(deps): update dependency pytest to v7.2.0 (#527) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index e07168502ea..49780e03569 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 +pytest==7.2.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 8394ed52816..5059e56e23c 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.1.3 +pytest==7.2.0 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index e07168502ea..49780e03569 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 +pytest==7.2.0 From 7f3a019839b118bec1cdca43046b35384117203e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Nov 2022 18:47:23 -0500 Subject: [PATCH 190/338] chore(python): drop flake8-import-order in 
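
[Note on the pinning style used throughout these requirements files: entries such as pandas===1.3.5; python_version == '3.7' alongside pandas==1.5.1; python_version > '3.7' combine PEP 440's arbitrary-equality operator (===) with a PEP 508 environment marker, so pip selects a different pin depending on the interpreter performing the install. A small illustration, not part of this repo, of how such markers evaluate, using the packaging library that pip itself builds on:

    from packaging.markers import Marker

    # The same markers that appear in
    # bigquery_storage/to_dataframe/requirements.txt.
    old_python = Marker("python_version == '3.7'")
    new_python = Marker("python_version > '3.7'")

    # evaluate() checks the marker against the current interpreter by
    # default; passing a dict overrides selected environment values, which
    # lets us simulate a Python 3.7 install from any interpreter.
    env = {"python_version": "3.7"}
    print(old_python.evaluate(env))  # True  -> pandas===1.3.5 is selected
    print(new_python.evaluate(env))  # False -> pandas==1.5.1 is skipped

This is why the Renovate bumps above only touch the unmarked or '> 3.7' pins: the 3.7-specific pins are frozen at the last versions supporting that interpreter.]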
samples noxfile (#535) Source-Link: https://github.com/googleapis/synthtool/commit/6ed3a831cb9ff69ef8a504c353e098ec0192ad93 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 26 +++--------------------- bigquery_storage/snippets/noxfile.py | 26 +++--------------------- bigquery_storage/to_dataframe/noxfile.py | 26 +++--------------------- 3 files changed, 9 insertions(+), 69 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index b053ca568f6..e8283c38d4a 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. # # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index b053ca568f6..e8283c38d4a 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
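
[The quickstart hunks above (the identical edit is applied to the snippets/ and to_dataframe/ noxfiles below) leave each samples noxfile with a noticeably smaller lint session: the _determine_local_import_names helper and the --import-order-style/--application-import-names flags disappear along with the flake8-import-order plugin. For reference, the resulting session reassembled from the '+' lines; TEST_CONFIG and FLAKE8_COMMON_ARGS are defined earlier in the same noxfile and are shown here with illustrative values only so the sketch stands alone:

    import nox

    # Illustrative stand-ins for values defined earlier in the noxfile.
    TEST_CONFIG = {"enforce_type_hints": False}
    FLAKE8_COMMON_ARGS = [
        "--show-source",
        "--builtin=gettext",
        "--max-complexity=20",
        "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py",
        "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202",
        "--max-line-length=88",
    ]


    @nox.session
    def lint(session: nox.sessions.Session) -> None:
        # Only flake8 itself is installed now, plus flake8-annotations
        # when the sample directory opts in to type-hint enforcement.
        if not TEST_CONFIG["enforce_type_hints"]:
            session.install("flake8")
        else:
            session.install("flake8", "flake8-annotations")

        args = FLAKE8_COMMON_ARGS + [
            ".",
        ]
        session.run("flake8", *args)
]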
# # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index b053ca568f6..e8283c38d4a 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
# # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) From 7f8c20538322c3e26803a0de853192abc0c6f99e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 27 Nov 2022 01:34:03 -0800 Subject: [PATCH 191/338] feat: add missing_value_interpretations to AppendRowsRequest (#529) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: remove stale header guidance for AppendRows PiperOrigin-RevId: 485941276 Source-Link: https://github.com/googleapis/googleapis/commit/a5f5928e736ea88c03e48c506a19fa632b43de9e Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ebfaa325101bc9b29ee34900b45b2f0d23981e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlYmZhYTMyNTEwMWJjOWIyOWVlMzQ5MDBiNDViMmYwZDIzOTgxZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update version in gapic_version.py * chore: Update to gapic-generator-python 1.6.0 feat(python): Add typing to proto.Message based class attributes feat(python): Snippetgen handling of repeated enum field PiperOrigin-RevId: 487326846 Source-Link: https://github.com/googleapis/googleapis/commit/da380c77bb87ba0f752baf07605dd1db30e1f7e1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ef5762ee6731a0cbbfea22fd0eecee51ab1c8e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlZjU3NjJlZTY3MzFhMGNiYmZlYTIyZmQwZWVjZWU1MWFiMWM4ZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: new APIs added to reflect updates to the filestore service - Add ENTERPRISE Tier - Add snapshot APIs: RevertInstance, ListSnapshots, CreateSnapshot, DeleteSnapshot, UpdateSnapshot - Add multi-share APIs: ListShares, GetShare, CreateShare, DeleteShare, UpdateShare - Add ConnectMode to NetworkConfig (for Private Service Access support) - New status codes (SUSPENDED/SUSPENDING, REVERTING/RESUMING) - Add SuspensionReason (for KMS 
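
[The user-facing change in this commit is the new missing_value_interpretations map on AppendRowsRequest, which lets a writer tell the backend how to fill columns that are absent from the serialized rows. A minimal sketch of how a caller might populate it, assuming the NULL_VALUE and DEFAULT_VALUE enum values from the regenerated proto; the write-stream path and serialized rows are hypothetical placeholders, not values from this repo:

    from google.cloud.bigquery_storage_v1 import types


    def build_append_request(write_stream_path, proto_rows):
        """Sketch: ask the backend to back-fill 'age' with the column's
        DEFAULT value and 'comment' with NULL when those columns are
        missing from the appended rows."""
        interp = types.AppendRowsRequest.MissingValueInterpretation
        return types.AppendRowsRequest(
            write_stream=write_stream_path,
            proto_rows=proto_rows,
            missing_value_interpretations={
                "age": interp.DEFAULT_VALUE,
                "comment": interp.NULL_VALUE,
            },
        )
]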
related suspension) - Add new fields to Instance information: max_capacity_gb, capacity_step_size_gb, max_share_count, capacity_gb, multi_share_enabled PiperOrigin-RevId: 487492758 Source-Link: https://github.com/googleapis/googleapis/commit/5be5981f50322cf0c7388595e0f31ac5d0693469 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab0e217f560cc2c1afc11441c2eab6b6950efd2b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWIwZTIxN2Y1NjBjYzJjMWFmYzExNDQxYzJlYWI2YjY5NTBlZmQyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add missing_value_interpretations to AppendRowsRequest PiperOrigin-RevId: 488693558 Source-Link: https://github.com/googleapis/googleapis/commit/43bf96fa41b19ed90790e157a0d0d22ecd20c0d8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1532dc6fd5b52a53613304e75aac1b5da407b6b3 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTUzMmRjNmZkNWI1MmE1MzYxMzMwNGU3NWFhYzFiNWRhNDA3YjZiMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Rosie Zou --- ...data_google.cloud.bigquery.storage.v1.json | 1480 +++++++++++++++++ ...google.cloud.bigquery.storage.v1beta2.json | 1480 +++++++++++++++++ 2 files changed, 2960 insertions(+) create mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json create mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json new file mode 100644 index 00000000000..c1d157157c6 --- /dev/null +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -0,0 +1,1480 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-storage", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.create_read_session", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": 
"google.cloud.bigquery_storage_v1.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", + "shortName": "create_read_session" + }, + "description": "Sample for CreateReadSession", + "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.create_read_session", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", + "shortName": "create_read_session" + }, + "description": "Sample for CreateReadSession", + "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.read_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", + "shortName": "BigQueryRead" + 
}, + "shortName": "ReadRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", + "shortName": "read_rows" + }, + "description": "Sample for ReadRows", + "file": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.read_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", + "shortName": "read_rows" + }, + "description": "Sample for ReadRows", + "file": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.split_read_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", + "shortName": "split_read_stream" + }, + "description": "Sample for SplitReadStream", + "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.split_read_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", + "shortName": "split_read_stream" + }, + "description": "Sample for SplitReadStream", + "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.append_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", + "shortName": "append_rows" + }, + "description": "Sample for AppendRows", + "file": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.append_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", + "shortName": "append_rows" + }, + "description": "Sample for AppendRows", + "file": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.batch_commit_write_streams", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" + }, + "description": "Sample for BatchCommitWriteStreams", + "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.batch_commit_write_streams", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" + }, + "description": "Sample for BatchCommitWriteStreams", + "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.create_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"write_stream", + "type": "google.cloud.bigquery_storage_v1.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "create_write_stream" + }, + "description": "Sample for CreateWriteStream", + "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.create_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "create_write_stream" + }, + "description": "Sample for CreateWriteStream", + "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.finalize_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" + }, + "description": "Sample for FinalizeWriteStream", + "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.finalize_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" + }, + "description": "Sample for FinalizeWriteStream", + "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.flush_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + 
"shortName": "FlushRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", + "shortName": "flush_rows" + }, + "description": "Sample for FlushRows", + "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.flush_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", + "shortName": "flush_rows" + }, + "description": "Sample for FlushRows", + "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.get_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "get_write_stream" + }, + "description": "Sample for GetWriteStream", + "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.get_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", + "shortName": "get_write_stream" + }, + "description": "Sample for GetWriteStream", + "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py" + } + ] +} diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json new file mode 100644 index 00000000000..302b781542e --- /dev/null +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -0,0 +1,1480 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta2", + "version": "v1beta2" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-storage", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.create_read_session", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", + "shortName": "create_read_session" + }, + "description": "Sample for CreateReadSession", + "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.create_read_session", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "CreateReadSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "read_session", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" + }, + { + "name": "max_stream_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", + "shortName": "create_read_session" + }, + "description": "Sample for CreateReadSession", + "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, 
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.read_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", + "shortName": "read_rows" + }, + "description": "Sample for ReadRows", + "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.read_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "ReadRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" + }, + { + "name": "read_stream", + "type": "str" + }, + { + "name": "offset", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", + "shortName": "read_rows" + }, + "description": "Sample for ReadRows", + "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", + "shortName": "BigQueryReadAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.split_read_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", + "shortName": "split_read_stream" + }, + "description": "Sample for SplitReadStream", + "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", + "shortName": "BigQueryReadClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.split_read_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", + "shortName": "BigQueryRead" + }, + "shortName": "SplitReadStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", + "shortName": "split_read_stream" + }, + "description": "Sample for SplitReadStream", + "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.append_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", + "shortName": "append_rows" + }, + "description": "Sample for AppendRows", + "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.append_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "AppendRows" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", + "shortName": "append_rows" + }, + "description": "Sample for AppendRows", + "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.batch_commit_write_streams", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" + }, + "description": "Sample for BatchCommitWriteStreams", + "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.batch_commit_write_streams", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "BatchCommitWriteStreams" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", + "shortName": "batch_commit_write_streams" + }, + "description": "Sample for BatchCommitWriteStreams", + "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.create_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "create_write_stream" + }, + "description": "Sample for CreateWriteStream", + "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.create_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "CreateWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "write_stream", + "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "create_write_stream" + }, + "description": 
"Sample for CreateWriteStream", + "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.finalize_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" + }, + "description": "Sample for FinalizeWriteStream", + "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.finalize_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FinalizeWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", + "shortName": "finalize_write_stream" + }, + "description": "Sample for FinalizeWriteStream", + "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.flush_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", + "shortName": "flush_rows" + }, + "description": "Sample for FlushRows", + "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.flush_rows", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "FlushRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" + }, + { + "name": "write_stream", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", + "shortName": "flush_rows" + }, + "description": "Sample for FlushRows", + "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", + "shortName": "BigQueryWriteAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.get_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "get_write_stream" + }, + "description": "Sample for GetWriteStream", + "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", + "shortName": "BigQueryWriteClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.get_write_stream", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", + "shortName": "BigQueryWrite" + }, + "shortName": "GetWriteStream" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", + "shortName": "get_write_stream" + }, + "description": "Sample for GetWriteStream", + "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py" + } + ] +} From d4109d8026aa98e93c914bb2e47d0733ef35cfe1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 30 Nov 2022 14:40:51 -0500 Subject: [PATCH 192/338] chore: move version to gapic_version.py (#537) * chore: move version to gapic_version.py * customize testing/constraints-3.7.txt * set coverage level to 98% --- .../snippet_metadata_bigquery storage_v1.json | 1479 ----------------- ...pet_metadata_bigquery storage_v1beta2.json | 1479 ----------------- 2 files changed, 2958 deletions(-) delete mode 100644 bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json delete mode 100644 bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json deleted file mode 100644 index 28ddb5745eb..00000000000 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1.json +++ /dev/null @@ -1,1479 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-storage" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": 
"bigquerystorage_v1_generated_big_query_read_read_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - 
"end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": 
"bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": 
"bigquerystorage_v1_generated_big_query_write_flush_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", 
- "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py" - } - ] -} diff --git a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json deleted file mode 100644 index e58914b745a..00000000000 --- a/bigquery_storage/generated_samples/snippet_metadata_bigquery storage_v1beta2.json +++ /dev/null @@ -1,1479 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1beta2", - "version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-storage" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" - 
}, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.read_rows", - "method": { - 
"fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": 
"google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": 
"google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.batch_commit_write_streams", - 
"method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 
45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { 
- "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py" - } - ] -} From b1788e9e4c89ae0858faff47f3f611ddb14a1bfc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 15 Dec 2022 17:33:19 -0500 Subject: [PATCH 193/338] chore(main): release 2.17.0 (#536) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..4caadee71fa 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.17.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..75092b6fe4b 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.17.0" }, "snippets": [ { From 09b96e091ba65fc5324ed7124b84de8dc4ee2813 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 08:36:55 -0500 Subject: [PATCH 194/338] chore(python): add support for python 3.11 (#544) Source-Link: https://github.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 2 +- bigquery_storage/snippets/noxfile.py | 2 +- bigquery_storage/to_dataframe/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index e8283c38d4a..1224cbe212e 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index e8283c38d4a..1224cbe212e 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index e8283c38d4a..1224cbe212e 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 5f820f77135f76a3b4a0c02522b720bb00ec5d0f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 9 Jan 2023 17:05:19 +0000 Subject: [PATCH 195/338] chore(deps): update all dependencies (#528) * chore(deps): update all dependencies * revert ipython * add dependency ipywidgets Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 13 +++++++------ 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 022ce341ae5..00a78a61571 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.16.2 +google-cloud-bigquery-storage==2.17.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 2624ee48aef..12a56dc33f0 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.16.2 -google-cloud-bigquery==3.3.5 +google-cloud-bigquery-storage==2.17.0 +google-cloud-bigquery==3.4.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index fc134f655e2..2f809bc2232 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,11 +1,12 @@ -google-auth==2.13.0 -google-cloud-bigquery-storage==2.16.2 -google-cloud-bigquery==3.3.5 -pyarrow==9.0.0 +google-auth==2.15.0 +google-cloud-bigquery-storage==2.17.0 +google-cloud-bigquery==3.4.1 +pyarrow==10.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' ipython==8.5.0; python_version >= '3.9' +ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' -pandas==1.5.1; python_version > '3.7' +pandas==1.5.2; python_version > '3.7' tqdm==4.64.1 -db-dtypes==1.0.4 +db-dtypes==1.0.5 From bfd070747e0a09bf9ef2215b13681d8cd2e6bb9c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 9 Jan 2023 17:59:03 +0000 Subject: [PATCH 196/338] chore(deps): update dependency ipython to v8.8.0 (#545) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2f809bc2232..bfa4dac8dae 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -4,7 +4,7 @@ google-cloud-bigquery==3.4.1 pyarrow==10.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' -ipython==8.5.0; python_version >= '3.9' +ipython==8.8.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' pandas==1.5.2; python_version > '3.7' From 22c5fbf0c0dd8e920184a2c3cf641dbd87281183 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 9 Jan 2023 22:49:17 +0000 Subject: [PATCH 197/338] chore(deps): update dependency google-auth to v2.16.0 (#546) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index bfa4dac8dae..3799e3a2638 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.15.0 +google-auth==2.16.0 google-cloud-bigquery-storage==2.17.0 google-cloud-bigquery==3.4.1 pyarrow==10.0.1 From 0158b335f2611272f96dc8d9bbfbb01ae20b851d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 23:17:32 -0500 Subject: [PATCH 198/338] feat: Add support for python 3.11 (#547) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * require proto-plus 1.22.2 for python 3.11 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 4caadee71fa..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.17.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 75092b6fe4b..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", 
"name": "google-cloud-bigquery-storage", - "version": "2.17.0" + "version": "0.1.0" }, "snippets": [ { From 2822c136f2fc81089aad2a7ca51f9b43cfc9efaf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 13:21:17 -0500 Subject: [PATCH 199/338] chore(main): release 2.18.0 (#548) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..928372005c0 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.18.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..15ee4e33b71 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.18.0" }, "snippets": [ { From a519419938b965b837e82870a44f3af0eac6661b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Jan 2023 20:04:13 +0000 Subject: [PATCH 200/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.18.0 (#549) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 00a78a61571..72659e594f5 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.17.0 +google-cloud-bigquery-storage==2.18.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 12a56dc33f0..e723636dfff 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.17.0 +google-cloud-bigquery-storage==2.18.0 google-cloud-bigquery==3.4.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 3799e3a2638..80ef95f9209 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.16.0 -google-cloud-bigquery-storage==2.17.0 +google-cloud-bigquery-storage==2.18.0 google-cloud-bigquery==3.4.1 pyarrow==10.0.1 ipython===7.31.1; python_version == '3.7' From 91332a27a44665bcb7e35ff41c9d11bf78c4808f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 14 Jan 2023 18:11:44 +0000 Subject: [PATCH 201/338] 
chore(deps): update dependency pytest to v7.2.1 (#550) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 49780e03569..805eb2a9f84 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.0 +pytest==7.2.1 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 5059e56e23c..fb319f4594d 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.2.0 +pytest==7.2.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 49780e03569..805eb2a9f84 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.0 +pytest==7.2.1 From 6bd9746e0ecf47b0b62b4dd129b3133d17564153 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 18 Jan 2023 16:39:16 +0000 Subject: [PATCH 202/338] chore(deps): update dependency google-cloud-bigquery to v3.4.2 (#551) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index e723636dfff..55079da21b0 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.18.0 -google-cloud-bigquery==3.4.1 +google-cloud-bigquery==3.4.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 80ef95f9209..ea3c6a83902 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.16.0 google-cloud-bigquery-storage==2.18.0 -google-cloud-bigquery==3.4.1 +google-cloud-bigquery==3.4.2 pyarrow==10.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' From a13a854555a8fedc4db6ccfc326927efa329aab3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Jan 2023 11:46:09 -0500 Subject: [PATCH 203/338] docs: Add documentation for enums (#553) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Add documentation for enums fix: Add context manager return types chore: Update gapic-generator-python to v1.8.1 PiperOrigin-RevId: 503210727 Source-Link: https://github.com/googleapis/googleapis/commit/a391fd1dac18dfdfa00c18c8404f2c3a6ff8e98e Source-Link: https://github.com/googleapis/googleapis-gen/commit/0080f830dec37c3384157082bce279e37079ea58 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * workaround docs issue Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- 
.../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 928372005c0..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.18.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 15ee4e33b71..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.18.0" + "version": "0.1.0" }, "snippets": [ { From 266f4bd5f50ea94fda1111f7c1a593a9feb49987 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 11:02:21 -0500 Subject: [PATCH 204/338] chore(main): release 2.18.1 (#554) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..ff4be332c58 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.18.1" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..f8727b6fdb0 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.18.1" }, "snippets": [ { From 02c412d1b6ae630ee4d1630179211a61d9345e9a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 23 Jan 2023 16:30:48 +0000 Subject: [PATCH 205/338] chore(deps): update dependency pandas to v1.5.3 (#552) Co-authored-by: Anthonios Partheniou --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ea3c6a83902..16684dd121c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ 
b/bigquery_storage/to_dataframe/requirements.txt @@ -7,6 +7,6 @@ ipython===8.0.1; python_version == '3.8' ipython==8.8.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' -pandas==1.5.2; python_version > '3.7' +pandas==1.5.3; python_version > '3.7' tqdm==4.64.1 db-dtypes==1.0.5 From 5718ccc494ae7a6a5e2bea0b2f9f06019b67ca7a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 24 Jan 2023 15:08:23 +0000 Subject: [PATCH 206/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.18.1 (#556) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 72659e594f5..663c0eb09e9 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.18.0 +google-cloud-bigquery-storage==2.18.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 55079da21b0..feb9b5254cc 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.18.0 +google-cloud-bigquery-storage==2.18.1 google-cloud-bigquery==3.4.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 16684dd121c..b9946b8006f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.16.0 -google-cloud-bigquery-storage==2.18.0 +google-cloud-bigquery-storage==2.18.1 google-cloud-bigquery==3.4.2 pyarrow==10.0.1 ipython===7.31.1; python_version == '3.7' From 72d4a7cee89d0afcb056a8e627a2434ef6f01d74 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 10:53:36 -0500 Subject: [PATCH 207/338] chore: Update gapic-generator-python to v1.8.2 (#557) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.2 PiperOrigin-RevId: 504289125 Source-Link: https://github.com/googleapis/googleapis/commit/38a48a44a44279e9cf9f2f864b588958a2d87491 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2dc22663dbe47a972c8d8c2f8a4df013dafdcbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJkYzIyNjYzZGJlNDdhOTcyYzhkOGMyZjhhNGRmMDEzZGFmZGNiYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index ff4be332c58..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.18.1" + 
"version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index f8727b6fdb0..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.18.1" + "version": "0.1.0" }, "snippets": [ { From 4ba06c36fcf3db90aa53235844db72d51fde962b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 27 Jan 2023 19:11:07 +0000 Subject: [PATCH 208/338] chore(deps): update all dependencies (#564) Co-authored-by: shollyman --- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index b9946b8006f..390b13fc4a8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,10 +1,10 @@ google-auth==2.16.0 google-cloud-bigquery-storage==2.18.1 google-cloud-bigquery==3.4.2 -pyarrow==10.0.1 +pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' -ipython==8.8.0; python_version >= '3.9' +ipython==8.9.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' pandas==1.5.3; python_version > '3.7' From 92223f0749d50906a20a94fa63e226c4c3de2daa Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 2 Feb 2023 10:43:44 +0000 Subject: [PATCH 209/338] chore(deps): update dependency google-cloud-bigquery to v3.5.0 (#567) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index feb9b5254cc..2ac060b3903 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.18.1 -google-cloud-bigquery==3.4.2 +google-cloud-bigquery==3.5.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 390b13fc4a8..c67c2a400c8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.16.0 google-cloud-bigquery-storage==2.18.1 -google-cloud-bigquery==3.4.2 +google-cloud-bigquery==3.5.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' From 044ac7f5fa25ef7613b4e0708efb5e57d6675df6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 15 Feb 2023 22:42:54 +0000 Subject: [PATCH 210/338] chore(deps): update dependency ipython to v8.10.0 (#573) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c67c2a400c8..298422f8c70 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -4,7 +4,7 @@ google-cloud-bigquery==3.5.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' 
-ipython==8.9.0; python_version >= '3.9' +ipython==8.10.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' pandas==1.5.3; python_version > '3.7' From 8c4f52486e0f355483bcfe62fda6b220c6827de7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Feb 2023 06:57:27 -0500 Subject: [PATCH 211/338] chore(deps): bump ipython from 7.31.1 to 8.10.0 in /samples/to_dataframe (#574) Bumps [ipython](https://github.com/ipython/ipython) from 7.31.1 to 8.10.0. - [Release notes](https://github.com/ipython/ipython/releases) - [Commits](https://github.com/ipython/ipython/compare/7.31.1...8.10.0) --- updated-dependencies: - dependency-name: ipython dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 298422f8c70..da111d7d855 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -3,7 +3,7 @@ google-cloud-bigquery-storage==2.18.1 google-cloud-bigquery==3.5.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' -ipython===8.0.1; python_version == '3.8' +ipython===8.10.0; python_version == '3.8' ipython==8.10.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' From a1bb4b040d375f8b6b54ad19efe484d96c247a5e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Mar 2023 10:11:52 +0000 Subject: [PATCH 212/338] chore(deps): update all dependencies (#575) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 2ac060b3903..123c1cf4779 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.18.1 -google-cloud-bigquery==3.5.0 +google-cloud-bigquery==3.6.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index da111d7d855..da83f81716f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,10 +1,10 @@ -google-auth==2.16.0 +google-auth==2.16.1 google-cloud-bigquery-storage==2.18.1 -google-cloud-bigquery==3.5.0 +google-cloud-bigquery==3.6.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.10.0; python_version >= '3.9' +ipython==8.11.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' pandas==1.5.3; python_version > '3.7' From f14cbeec0c9f0c25a5bea98070904cb1217f2454 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 1 Mar 2023 13:22:19 -0500 Subject: [PATCH 213/338] chore(main): release 2.19.0 (#572) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..b365a7c45db 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.19.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..5bfb6fe22ab 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.19.0" }, "snippets": [ { From 6b4eb25d7251ea5414e45786ff0af9c8582066bb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Mar 2023 20:42:44 +0000 Subject: [PATCH 214/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.19.0 (#579) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 663c0eb09e9..8c23ee3d2bb 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.18.1 +google-cloud-bigquery-storage==2.19.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 123c1cf4779..80efe3dad2c 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.18.1 +google-cloud-bigquery-storage==2.19.0 google-cloud-bigquery==3.6.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index da83f81716f..2b561911ec8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.16.1 -google-cloud-bigquery-storage==2.18.1 +google-cloud-bigquery-storage==2.19.0 google-cloud-bigquery==3.6.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' From 43e604673896adcc3ec0a1feb2bffc2dea121c25 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 4 Mar 2023 11:31:44 +0000 Subject: [PATCH 215/338] chore(deps): update all dependencies (#580) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 805eb2a9f84..c021c5b5b70 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.1 +pytest==7.2.2 diff --git 
a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index fb319f4594d..dd558e969f2 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.2.1 +pytest==7.2.2 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 805eb2a9f84..c021c5b5b70 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.1 +pytest==7.2.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 2b561911ec8..8744d45e593 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.16.1 +google-auth==2.16.2 google-cloud-bigquery-storage==2.19.0 google-cloud-bigquery==3.6.0 pyarrow==11.0.0 @@ -8,5 +8,5 @@ ipython==8.11.0; python_version >= '3.9' ipywidgets==8.0.4 pandas===1.3.5; python_version == '3.7' pandas==1.5.3; python_version > '3.7' -tqdm==4.64.1 +tqdm==4.65.0 db-dtypes==1.0.5 From df5a15a5d8ca741c08119f13fb56ff6d4efc4ee4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 13 Mar 2023 20:29:39 +0000 Subject: [PATCH 216/338] chore(deps): update dependency google-cloud-bigquery to v3.7.0 (#582) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 80efe3dad2c..683412117a6 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.19.0 -google-cloud-bigquery==3.6.0 +google-cloud-bigquery==3.7.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 8744d45e593..9ab7742cf28 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.16.2 google-cloud-bigquery-storage==2.19.0 -google-cloud-bigquery==3.6.0 +google-cloud-bigquery==3.7.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' From cc93f687b0429ba4967a560e79590e5c60a99933 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 09:43:59 -0400 Subject: [PATCH 217/338] docs: Fix formatting of request arg in docstring (#586) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Fix formatting of request arg in docstring chore: Update gapic-generator-python to v1.9.1 PiperOrigin-RevId: 518604533 Source-Link: https://github.com/googleapis/googleapis/commit/8a085aeddfa010af5bcef090827aac5255383d7e Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2ab4b0a0ae2907e812c209198a74e0898afcb04 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJhYjRiMGEwYWUyOTA3ZTgxMmMyMDkxOThhNzRlMDg5OGFmY2IwNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 
+- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index b365a7c45db..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.19.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 5bfb6fe22ab..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.19.0" + "version": "0.1.0" }, "snippets": [ { From a1878a5c827f209f1da6d8d587b42495ff4cfd8d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:32:55 -0400 Subject: [PATCH 218/338] chore(main): release 2.19.1 (#587) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..a29de587873 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.19.1" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..aa4256a8574 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.19.1" }, "snippets": [ { From fb0bb39e5495ee5acc0a5bfed149c3e37b6ccd91 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 30 Mar 2023 01:49:24 +0100 Subject: [PATCH 219/338] chore(deps): update all dependencies (#585) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 10 +++++----- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 8c23ee3d2bb..bd3bcfa4137 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ 
b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.19.0 +google-cloud-bigquery-storage==2.19.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 683412117a6..98aa4b829f3 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.19.0 -google-cloud-bigquery==3.7.0 +google-cloud-bigquery-storage==2.19.1 +google-cloud-bigquery==3.9.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 9ab7742cf28..f65c36b3eca 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,12 +1,12 @@ -google-auth==2.16.2 -google-cloud-bigquery-storage==2.19.0 -google-cloud-bigquery==3.7.0 +google-auth==2.17.0 +google-cloud-bigquery-storage==2.19.1 +google-cloud-bigquery==3.9.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython==8.11.0; python_version >= '3.9' -ipywidgets==8.0.4 +ipywidgets==8.0.6 pandas===1.3.5; python_version == '3.7' pandas==1.5.3; python_version > '3.7' tqdm==4.65.0 -db-dtypes==1.0.5 +db-dtypes==1.1.0 From d2e4564594c46c76552ac18fbeea35879ca64667 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 6 Apr 2023 17:16:34 +0100 Subject: [PATCH 220/338] chore(deps): update all dependencies (#588) --- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index f65c36b3eca..948d1e99a1b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,12 +1,12 @@ -google-auth==2.17.0 +google-auth==2.17.1 google-cloud-bigquery-storage==2.19.1 google-cloud-bigquery==3.9.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.11.0; python_version >= '3.9' +ipython==8.12.0; python_version >= '3.9' ipywidgets==8.0.6 pandas===1.3.5; python_version == '3.7' pandas==1.5.3; python_version > '3.7' tqdm==4.65.0 -db-dtypes==1.1.0 +db-dtypes==1.1.1 From 56c1310a4060f835a296ef01b2bfbf2dd8a21c04 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 18 Apr 2023 19:00:40 +0200 Subject: [PATCH 221/338] chore(deps): update all dependencies (#589) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index c021c5b5b70..c4d04a08d02 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.2 +pytest==7.3.1 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index dd558e969f2..eec27c1118c 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.2.2 +pytest==7.3.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt 
index c021c5b5b70..c4d04a08d02 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.2 +pytest==7.3.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 948d1e99a1b..bcd87d774cc 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.17.1 +google-auth==2.17.3 google-cloud-bigquery-storage==2.19.1 google-cloud-bigquery==3.9.0 pyarrow==11.0.0 @@ -7,6 +7,6 @@ ipython===8.10.0; python_version == '3.8' ipython==8.12.0; python_version >= '3.9' ipywidgets==8.0.6 pandas===1.3.5; python_version == '3.7' -pandas==1.5.3; python_version > '3.7' +pandas==2.0.0; python_version > '3.7' tqdm==4.65.0 db-dtypes==1.1.1 From 6a1ac31be89160e4364703b324345d1530708ccf Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 19 Apr 2023 18:09:20 +0200 Subject: [PATCH 222/338] chore(deps): update dependency google-cloud-bigquery to v3.10.0 (#590) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 98aa4b829f3..e65726b54af 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.19.1 -google-cloud-bigquery==3.9.0 +google-cloud-bigquery==3.10.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index bcd87d774cc..76f348f47c8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.17.3 google-cloud-bigquery-storage==2.19.1 -google-cloud-bigquery==3.9.0 +google-cloud-bigquery==3.10.0 pyarrow==11.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' From 7b63d5f04e6f2190a587134af6b42977184e06e5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 May 2023 11:47:16 -0700 Subject: [PATCH 223/338] feat: add table sampling to ReadAPI v1 (#596) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add table sampling to ReadAPI v1 feat: add storage error codes for KMS PiperOrigin-RevId: 534092654 Source-Link: https://github.com/googleapis/googleapis/commit/adcd87eb8cc501ba16f4df3051869c9392e9041f Source-Link: https://github.com/googleapis/googleapis-gen/commit/53f03dcd2ae0d86832d87a530aa538b9daebf2b0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTNmMDNkY2QyYWUwZDg2ODMyZDg3YTUzMGFhNTM4YjlkYWViZjJiMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index a29de587873..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ 
b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.19.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index aa4256a8574..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.19.1" + "version": "0.1.0" }, "snippets": [ { From 2971ec68649c1e749daea216f1d888061c5120b8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 31 May 2023 15:06:25 -0400 Subject: [PATCH 224/338] chore(main): release 2.20.0 (#597) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..8489e90f8c0 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.20.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..449bef1cbbc 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.20.0" }, "snippets": [ { From b349b4b475f7a17ae7195b21ce3811738445f20f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Jun 2023 13:27:06 +0200 Subject: [PATCH 225/338] chore(deps): update all dependencies (#591) Co-authored-by: Anthonios Partheniou --- bigquery_storage/to_dataframe/requirements.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 76f348f47c8..01e2b4b7d49 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,12 +1,12 @@ -google-auth==2.17.3 +google-auth==2.18.1 google-cloud-bigquery-storage==2.19.1 google-cloud-bigquery==3.10.0 -pyarrow==11.0.0 +pyarrow==12.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.12.0; python_version >= '3.9' +ipython==8.13.2; python_version >= '3.9' ipywidgets==8.0.6 pandas===1.3.5; python_version == '3.7' -pandas==2.0.0; python_version > '3.7' 
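
PATCH 223 above introduces table sampling for the v1 read API. The option rides on the read session request rather than a separate call; a minimal sketch, assuming the feature is exposed as a sample_percentage field on ReadSession.TableReadOptions and using placeholder project names:

    from google.cloud.bigquery_storage_v1 import BigQueryReadClient, types

    client = BigQueryReadClient()
    requested_session = types.ReadSession(
        # Table path format: projects/{project}/datasets/{dataset}/tables/{table}.
        table="projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current",
        data_format=types.DataFormat.AVRO,
        read_options=types.ReadSession.TableReadOptions(
            # Assumed field from this change: scan roughly 10% of the table.
            sample_percentage=10.0,
        ),
    )
    session = client.create_read_session(
        parent="projects/your-project-id",  # placeholder billing project
        read_session=requested_session,
        max_stream_count=1,
    )
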
+pandas==2.0.1; python_version > '3.7' tqdm==4.65.0 db-dtypes==1.1.1 From 1e4c16896c292f123a76026c190c3b8c42ee3b0b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Jun 2023 13:38:37 +0200 Subject: [PATCH 226/338] chore(deps): update all dependencies (#599) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index bd3bcfa4137..6b7ac1faf32 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.19.1 +google-cloud-bigquery-storage==2.20.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index e65726b54af..a6e915a8ddd 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.19.1 +google-cloud-bigquery-storage==2.20.0 google-cloud-bigquery==3.10.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 01e2b4b7d49..cbf6cb31823 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ -google-auth==2.18.1 -google-cloud-bigquery-storage==2.19.1 +google-auth==2.19.0 +google-cloud-bigquery-storage==2.20.0 google-cloud-bigquery==3.10.0 pyarrow==12.0.0 ipython===7.31.1; python_version == '3.7' @@ -7,6 +7,6 @@ ipython===8.10.0; python_version == '3.8' ipython==8.13.2; python_version >= '3.9' ipywidgets==8.0.6 pandas===1.3.5; python_version == '3.7' -pandas==2.0.1; python_version > '3.7' +pandas==2.0.2; python_version > '3.7' tqdm==4.65.0 db-dtypes==1.1.1 From f34f212c90f5906ba4b160f97da61f7fde100952 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 22 Jun 2023 15:44:10 +0200 Subject: [PATCH 227/338] chore(deps): update all dependencies (#600) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 8 ++++---- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index c4d04a08d02..56628493b8c 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.1 +pytest==7.3.2 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index eec27c1118c..6d55f931146 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.3.1 +pytest==7.3.2 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index a6e915a8ddd..d871362ca9f 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.20.0 -google-cloud-bigquery==3.10.0 +google-cloud-bigquery==3.11.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt 
b/bigquery_storage/to_dataframe/requirements-test.txt index c4d04a08d02..56628493b8c 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.1 +pytest==7.3.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index cbf6cb31823..25559e38970 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,10 +1,10 @@ -google-auth==2.19.0 +google-auth==2.20.0 google-cloud-bigquery-storage==2.20.0 -google-cloud-bigquery==3.10.0 -pyarrow==12.0.0 +google-cloud-bigquery==3.11.1 +pyarrow==12.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.13.2; python_version >= '3.9' +ipython==8.14.0; python_version >= '3.9' ipywidgets==8.0.6 pandas===1.3.5; python_version == '3.7' pandas==2.0.2; python_version > '3.7' From 3cf0b1dc7ba4631696a6d412a127dbc355728875 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Jun 2023 10:08:50 -0400 Subject: [PATCH 228/338] feat: add estimated physical file sizes to ReadAPI v1 (#605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add estimated physical file sizes to ReadAPI v1 PiperOrigin-RevId: 542350532 Source-Link: https://github.com/googleapis/googleapis/commit/a4ff1c210c20efa3b81ecd3912936f96fcd0d708 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b5b5fe5dcd4bce15b7b9035b925452ee7caf489b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjViNWZlNWRjZDRiY2UxNWI3YjkwMzViOTI1NDUyZWU3Y2FmNDg5YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 8489e90f8c0..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.20.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 449bef1cbbc..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.20.0" + "version": "0.1.0" }, "snippets": [ { From ed168a5f3e6917025a91d32fe01f58220da68bb2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 11:23:34 -0400 Subject: [PATCH 229/338] chore(main): release 2.21.0 (#607) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..645bff8e4cc 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.21.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..ffcb67431f6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.21.0" }, "snippets": [ { From c1bcc08d44e2cc9310ecddcc1c58169d886faa09 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 5 Jul 2023 21:24:51 +0200 Subject: [PATCH 230/338] chore(deps): update dependency google-cloud-bigquery to v3.11.2 (#606) Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index d871362ca9f..9adb8323f5b 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.20.0 -google-cloud-bigquery==3.11.1 +google-cloud-bigquery==3.11.2 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 25559e38970..804c02cd786 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.20.0 google-cloud-bigquery-storage==2.20.0 -google-cloud-bigquery==3.11.1 +google-cloud-bigquery==3.11.2 pyarrow==12.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' From ce5c36d6d976e2ad5ea052b65cf7efde3ec3a2a1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 6 Jul 2023 01:03:28 +0200 Subject: [PATCH 231/338] chore(deps): update all dependencies (#611) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 10 +++++----- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 56628493b8c..70613be0cfe 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ 
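
PATCH 228 above ("add estimated physical file sizes to ReadAPI v1") adds a server-populated estimate to the read session rather than a new request option. A sketch of inspecting it, assuming the estimate surfaces as an estimated_total_physical_file_size field on the returned ReadSession (all resource names are placeholders):

    from google.cloud.bigquery_storage_v1 import BigQueryReadClient, types

    client = BigQueryReadClient()
    session = client.create_read_session(
        parent="projects/your-project-id",
        read_session=types.ReadSession(
            table="projects/your-project-id/datasets/your_dataset/tables/your_table",
            data_format=types.DataFormat.ARROW,
        ),
        max_stream_count=1,
    )
    # Assumed field from this change: a size estimate, in bytes, of the
    # physical files backing the table; useful for sizing readers up front.
    print(session.estimated_total_physical_file_size)
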
-pytest==7.3.2 +pytest==7.4.0 diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 6b7ac1faf32..e684bb47d31 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.20.0 +google-cloud-bigquery-storage==2.21.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 6d55f931146..2a4dccc0df2 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.3.2 +pytest==7.4.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 9adb8323f5b..9ad6f6cd31f 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.20.0 -google-cloud-bigquery==3.11.2 +google-cloud-bigquery-storage==2.21.0 +google-cloud-bigquery==3.11.3 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 56628493b8c..70613be0cfe 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.2 +pytest==7.4.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 804c02cd786..5f439cb7e8a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,12 +1,12 @@ -google-auth==2.20.0 -google-cloud-bigquery-storage==2.20.0 -google-cloud-bigquery==3.11.2 +google-auth==2.21.0 +google-cloud-bigquery-storage==2.21.0 +google-cloud-bigquery==3.11.3 pyarrow==12.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython==8.14.0; python_version >= '3.9' -ipywidgets==8.0.6 +ipywidgets==8.0.7 pandas===1.3.5; python_version == '3.7' -pandas==2.0.2; python_version > '3.7' +pandas==2.0.3; python_version > '3.7' tqdm==4.65.0 db-dtypes==1.1.1 From 900e97421062d0df1ae4a08d783738a013f0ee47 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jul 2023 10:08:39 -0400 Subject: [PATCH 232/338] feat: add ResourceExhausted to retryable error for Write API unary calls (#612) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add ResourceExhausted to retryable error for Write API unary calls docs: add multiplexing documentation PiperOrigin-RevId: 545839491 Source-Link: https://github.com/googleapis/googleapis/commit/2b006afc7a392006602ce0868c22341b5aeef4a8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0d52d385bd4e78c7b2c83755013fe103e804c384 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGQ1MmQzODViZDRlNzhjN2IyYzgzNzU1MDEzZmUxMDNlODA0YzM4NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json 
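
PATCH 232 above makes RESOURCE_EXHAUSTED retryable for the Write API's unary calls by changing the generated clients' default retry policies, so no user code is needed to pick it up. For reference, an explicit per-call policy of the same shape can still be passed; a sketch with placeholder resource names and illustrative backoff numbers (the real defaults live in the generated client config):

    from google.api_core import exceptions, retry
    from google.cloud.bigquery_storage_v1 import BigQueryWriteClient, types

    client = BigQueryWriteClient()
    # Retry RESOURCE_EXHAUSTED (and transient unavailability) with
    # exponential backoff on a unary call such as create_write_stream.
    unary_retry = retry.Retry(
        predicate=retry.if_exception_type(
            exceptions.ResourceExhausted,
            exceptions.ServiceUnavailable,
        ),
        initial=1.0,
        maximum=60.0,
        multiplier=2.0,
        timeout=600.0,
    )
    write_stream = types.WriteStream()
    write_stream.type_ = types.WriteStream.Type.PENDING
    client.create_write_stream(
        parent="projects/your-project-id/datasets/your_dataset/tables/your_table",
        write_stream=write_stream,
        retry=unary_retry,
    )
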
b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 645bff8e4cc..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.21.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index ffcb67431f6..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.21.0" + "version": "0.1.0" }, "snippets": [ { From e9ab0cab0cff1058b7fdc775ef2fe6f395b79d65 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 6 Jul 2023 12:13:33 -0400 Subject: [PATCH 233/338] chore(main): release 2.22.0 (#613) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..d8c2d499ced 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.22.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..f9268aaefb0 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.22.0" }, "snippets": [ { From 4787b47658f537a8e0e6648e992fcd2d0f18655b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 7 Jul 2023 00:12:56 +0200 Subject: [PATCH 234/338] chore(deps): update dependency google-cloud-bigquery-storage to v2.22.0 (#614) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index e684bb47d31..48c0a653c9e 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.21.0 +google-cloud-bigquery-storage==2.22.0 diff --git 
a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 9ad6f6cd31f..e2cff95ffca 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.21.0 +google-cloud-bigquery-storage==2.22.0 google-cloud-bigquery==3.11.3 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5f439cb7e8a..e80f9d2c8c3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.21.0 -google-cloud-bigquery-storage==2.21.0 +google-cloud-bigquery-storage==2.22.0 google-cloud-bigquery==3.11.3 pyarrow==12.0.1 ipython===7.31.1; python_version == '3.7' From 794ebf2a9229482c59c6d20ab8d583fdef60b136 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 09:05:50 -0400 Subject: [PATCH 235/338] chore: Update gapic-generator-python to v1.11.2 (#615) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.2 PiperOrigin-RevId: 546510849 Source-Link: https://github.com/googleapis/googleapis/commit/736073ad9a9763a170eceaaa54519bcc0ea55a5e Source-Link: https://github.com/googleapis/googleapis-gen/commit/deb64e8ec19d141e31089fe932b3a997ad541c4d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGViNjRlOGVjMTlkMTQxZTMxMDg5ZmU5MzJiM2E5OTdhZDU0MWM0ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- ...age_v1_generated_big_query_read_create_read_session_async.py | 2 +- ...rage_v1_generated_big_query_read_create_read_session_sync.py | 2 +- ...gquerystorage_v1_generated_big_query_read_read_rows_async.py | 2 +- ...igquerystorage_v1_generated_big_query_read_read_rows_sync.py | 2 +- ...orage_v1_generated_big_query_read_split_read_stream_async.py | 2 +- ...torage_v1_generated_big_query_read_split_read_stream_sync.py | 2 +- ...erystorage_v1_generated_big_query_write_append_rows_async.py | 2 +- ...uerystorage_v1_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...ge_v1_generated_big_query_write_create_write_stream_async.py | 2 +- ...age_v1_generated_big_query_write_create_write_stream_sync.py | 2 +- ..._v1_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...e_v1_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...uerystorage_v1_generated_big_query_write_flush_rows_async.py | 2 +- ...querystorage_v1_generated_big_query_write_flush_rows_sync.py | 2 +- ...orage_v1_generated_big_query_write_get_write_stream_async.py | 2 +- ...torage_v1_generated_big_query_write_get_write_stream_sync.py | 2 +- ...1beta2_generated_big_query_read_create_read_session_async.py | 2 +- ...v1beta2_generated_big_query_read_create_read_session_sync.py | 2 +- ...ystorage_v1beta2_generated_big_query_read_read_rows_async.py | 2 +- ...rystorage_v1beta2_generated_big_query_read_read_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_read_split_read_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_read_split_read_stream_sync.py | 2 +- ...orage_v1beta2_generated_big_query_write_append_rows_async.py | 2 +- 
...torage_v1beta2_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...beta2_generated_big_query_write_create_write_stream_async.py | 2 +- ...1beta2_generated_big_query_write_create_write_stream_sync.py | 2 +- ...ta2_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...eta2_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...torage_v1beta2_generated_big_query_write_flush_rows_async.py | 2 +- ...storage_v1beta2_generated_big_query_write_flush_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_write_get_write_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_write_get_write_stream_sync.py | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 38 files changed, 38 insertions(+), 38 deletions(-) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index ccf607d2715..6411ff6b7d4 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index 86bad351b02..ff2a64bede1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index 72026516e33..fe590e478c0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index 68fa31134f3..10224fa18fb 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index 2c0b697d375..765abc3c1bb 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index 9692ba37769..22d209a4d9d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 34fb01321fd..915076b6a0b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index fba91faab1a..f7cd0459ffe 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py index da8d31415e7..d1113092708 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py index 162647eae22..af326fa89b4 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index 0d763268da7..28a7ddf17b6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index d1c6f5b9ec7..ed298f92e6c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index d1923783ed5..6799e295f35 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index 99e89a5e72a..c5b9cb129ab 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index 632abddb81d..dac63130ff9 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index 490ec1fe84e..9217fc938c1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index 00c8604acb9..dda01971cf6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index 54260dd65af..46a7d9275a7 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index 16e34a9d91c..377cd61bf40 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index 184fcdf0266..212f80b1dc6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index 91659f55331..696e2d62416 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index 993d5da7a8d..1a2b4817080 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index 6c2eec31229..5671fdc524e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index 637b4c7f712..687261a4731 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py index 843fbeda666..4668d76bd90 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py index 51118d10dae..a25d7fe374d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py index 11136e94a70..bb9175fb831 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py index 3ff602c8981..a0aaa866fa3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index ed1fc872fb9..57c20ad0cfc 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index 6118fca1eec..a57b83cb74b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index 746548c5170..bac5a2f2dcd 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index 813798efefa..19f459ac66c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py index 7a790e4cf0d..2927e788022 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py index e84ebb2fc67..ea6ac1c63be 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index 0c51aea4cd0..6069e667f64 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index 03e651c7607..fd330df57c8 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index d8c2d499ced..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.22.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index f9268aaefb0..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.22.0" + "version": "0.1.0" }, "snippets": [ { From 374608e6ae56e3d53c2885487d74d612d0acf4f1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 12 Jul 2023 17:33:17 +0200 Subject: [PATCH 236/338] chore(deps): update dependency google-auth to v2.22.0 (#616) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index e80f9d2c8c3..c6a9e16be9f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.21.0 +google-auth==2.22.0 google-cloud-bigquery-storage==2.22.0 google-cloud-bigquery==3.11.3 pyarrow==12.0.1 From 37bad29e34d7f9d2c93ad7d3d7798898c75986a6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 22 Jul 2023 13:09:27 +0200 Subject: [PATCH 237/338] chore(deps): update dependency google-cloud-bigquery to v3.11.4 (#621) Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index e2cff95ffca..2833567bcc4 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.22.0 -google-cloud-bigquery==3.11.3 +google-cloud-bigquery==3.11.4 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c6a9e16be9f..e1b23bbbf36 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.22.0 google-cloud-bigquery-storage==2.22.0 -google-cloud-bigquery==3.11.3 +google-cloud-bigquery==3.11.4 pyarrow==12.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' From f67698f70cd162416108d06c43daf31f1a26c511 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 1 Aug 2023 22:44:28 +0200 Subject: [PATCH 238/338] chore(deps): update dependency ipywidgets to v8.1.0 (#626) Co-authored-by: Anthonios Partheniou --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt 
b/bigquery_storage/to_dataframe/requirements.txt index e1b23bbbf36..9f99eb3c88b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -5,7 +5,7 @@ pyarrow==12.0.1 ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython==8.14.0; python_version >= '3.9' -ipywidgets==8.0.7 +ipywidgets==8.1.0 pandas===1.3.5; python_version == '3.7' pandas==2.0.3; python_version > '3.7' tqdm==4.65.0 From 725f413c8f260d7b5c8a463fce7eead4b924f5b7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 8 Aug 2023 16:04:29 +0200 Subject: [PATCH 239/338] chore(deps): update dependency tqdm to v4.65.1 (#631) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 9f99eb3c88b..26eee7c3b97 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,5 +8,5 @@ ipython==8.14.0; python_version >= '3.9' ipywidgets==8.1.0 pandas===1.3.5; python_version == '3.7' pandas==2.0.3; python_version > '3.7' -tqdm==4.65.0 +tqdm==4.65.1 db-dtypes==1.1.1 From 4e220489c53b2f98b3f57d9b5683ad90bd735425 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Aug 2023 15:29:17 +0200 Subject: [PATCH 240/338] chore(deps): update dependency tqdm to v4.65.2 (#632) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 26eee7c3b97..e923db5ee6d 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,5 +8,5 @@ ipython==8.14.0; python_version >= '3.9' ipywidgets==8.1.0 pandas===1.3.5; python_version == '3.7' pandas==2.0.3; python_version > '3.7' -tqdm==4.65.1 +tqdm==4.65.2 db-dtypes==1.1.1 From 75e34de166fee8e91335302f9c31ea8179d62761 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Aug 2023 17:56:33 +0200 Subject: [PATCH 241/338] chore(deps): update dependency tqdm to v4.66.0 (#633) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index e923db5ee6d..59a4665dea5 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,5 +8,5 @@ ipython==8.14.0; python_version >= '3.9' ipywidgets==8.1.0 pandas===1.3.5; python_version == '3.7' pandas==2.0.3; python_version > '3.7' -tqdm==4.65.2 +tqdm==4.66.0 db-dtypes==1.1.1 From b38143d6f3cd61f541d52f9d026904880eb40227 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Aug 2023 01:50:06 +0200 Subject: [PATCH 242/338] chore(deps): update dependency tqdm to v4.66.1 (#634) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 59a4665dea5..972037accc9 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,5 +8,5 @@ ipython==8.14.0; python_version >= '3.9' ipywidgets==8.1.0 pandas===1.3.5; python_version == '3.7' pandas==2.0.3; python_version > '3.7' -tqdm==4.66.0 +tqdm==4.66.1 db-dtypes==1.1.1 From 
2dc8510567d32c8119b0508aa22a2afe8449b57f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 11:12:04 -0400 Subject: [PATCH 243/338] chore: [autoapprove] Update `black` and `isort` to latest versions (#688) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- bigquery_storage/snippets/append_rows_pending.py | 1 - bigquery_storage/snippets/customer_record_pb2.py | 1 - bigquery_storage/snippets/sample_data_pb2.py | 1 - 3 files changed, 3 deletions(-) diff --git a/bigquery_storage/snippets/append_rows_pending.py b/bigquery_storage/snippets/append_rows_pending.py index af780ffa5b9..1c391a0961b 100644 --- a/bigquery_storage/snippets/append_rows_pending.py +++ b/bigquery_storage/snippets/append_rows_pending.py @@ -39,7 +39,6 @@ def create_row_data(row_num: int, name: str): def append_rows_pending(project_id: str, dataset_id: str, table_id: str): - """Create a write stream, write some sample data, and commit the stream.""" write_client = bigquery_storage_v1.BigQueryWriteClient() parent = write_client.table_path(project_id, dataset_id, table_id) diff --git a/bigquery_storage/snippets/customer_record_pb2.py b/bigquery_storage/snippets/customer_record_pb2.py index d797784b0bf..a76fa02d313 100644 --- a/bigquery_storage/snippets/customer_record_pb2.py +++ b/bigquery_storage/snippets/customer_record_pb2.py @@ -31,7 +31,6 @@ _sym_db.RegisterMessage(CustomerRecord) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None _CUSTOMERRECORD._serialized_start = 25 _CUSTOMERRECORD._serialized_end = 81 diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py index 85106843c8c..4e73f1dd48d 100644 --- a/bigquery_storage/snippets/sample_data_pb2.py +++ b/bigquery_storage/snippets/sample_data_pb2.py @@ -42,7 +42,6 @@ _sym_db.RegisterMessage(SampleData.SampleStruct) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None _SAMPLEDATA._serialized_start = 22 _SAMPLEDATA._serialized_end = 447 From 5b8e1216f048014c0bcc5bb251eafa35dab24b27 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 10 Nov 2023 02:17:43 +0100 Subject: [PATCH 244/338] chore(deps): update dependency pyarrow to v14 [security] (#703) * chore(deps): update dependency pyarrow to v14 [security] * pin pyarrow to 12.0.1 for python 3.7 --------- Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 972037accc9..77f21dc7ff8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,8 @@ google-auth==2.22.0 google-cloud-bigquery-storage==2.22.0 google-cloud-bigquery==3.11.4 -pyarrow==12.0.1 +pyarrow==12.0.1; python_version == '3.7' +pyarrow==14.0.1; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython==8.14.0; python_version >= '3.9' From c53660ebaa156bc075aba334553218603003a284 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Thu, 16 Nov 2023 19:53:42 -0500 Subject: [PATCH 245/338] fix(samples): catch EOFError in quickstart (#708) --- 
bigquery_storage/quickstart/quickstart.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py index 7dda6bbfe48..6fbbdb3133d 100644 --- a/bigquery_storage/quickstart/quickstart.py +++ b/bigquery_storage/quickstart/quickstart.py @@ -75,9 +75,14 @@ def main(project_id="your-project-id", snapshot_millis=0): names = set() states = set() - for row in rows: - names.add(row["name"]) - states.add(row["state"]) + # fastavro returns EOFError instead of StopIterationError starting v1.8.4. + # See https://github.com/googleapis/python-bigquery-storage/pull/687 + try: + for row in rows: + names.add(row["name"]) + states.add(row["state"]) + except EOFError: + pass print("Got {} unique names in states: {}".format(len(names), ", ".join(states))) # [END bigquerystorage_quickstart] From c289b54e8947f2f13cf06bde5144e98631bfebc3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 09:47:54 -0800 Subject: [PATCH 246/338] chore(main): release 2.23.0 (#640) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..39b1f2dedb0 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.23.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..83336f2a0dd 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.23.0" }, "snippets": [ { From 60745d7723b190bb6ac94b30cc5d8f184d1646b5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 14:25:40 -0800 Subject: [PATCH 247/338] chore(python): Add Python 3.12 (#710) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * Add python 3.12 to setup.py * add dependency for 3.12 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update owlbot * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from 
OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * undo owlbot change * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Linchin --- bigquery_storage/quickstart/noxfile.py | 2 +- bigquery_storage/snippets/noxfile.py | 2 +- bigquery_storage/to_dataframe/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 1224cbe212e..3b7135946fd 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 1224cbe212e..3b7135946fd 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 1224cbe212e..3b7135946fd 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 6f1ff93967b04f12176ee6fabed6c192fd4bc9bb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 13:00:33 -0500 Subject: [PATCH 248/338] fix: use `retry_async` instead of `retry` in async client (#713) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.12.0 PiperOrigin-RevId: 586356061 Source-Link: https://github.com/googleapis/googleapis/commit/72a1f55abaedbb62decd8ae8a44a4de223799c76 Source-Link: https://github.com/googleapis/googleapis-gen/commit/558a04bcd1cc0576e8fac1089e48e48b27ac161b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU4YTA0YmNkMWNjMDU3NmU4ZmFjMTA4OWU0OGU0OGIyN2FjMTYxYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: added Generator API docs: updated doc for speech mode PiperOrigin-RevId: 586469693 Source-Link: https://github.com/googleapis/googleapis/commit/e8148d6d4bb02c907e06a784848ef731acb9e258 Source-Link: https://github.com/googleapis/googleapis-gen/commit/85136bd04383ed7172bb18b7b8d220dd7ff6b3a0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODUxMzZiZDA0MzgzZWQ3MTcyYmIxOGI3YjhkMjIwZGQ3ZmY2YjNhMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 39b1f2dedb0..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.23.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 83336f2a0dd..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.23.0" + "version": "0.1.0" }, "snippets": [ { From 13806d0fcb75b3d4969d01fc3b8209b840f2be1a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 13:52:06 -0800 Subject: [PATCH 249/338] chore(main): release 2.24.0 (#714) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..ab9997594b9 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.24.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..764c0a58458 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.24.0" }, "snippets": [ { From ba0cdf81889472208dc2dd1b4635e370f14985a2 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 14:13:56 +0100 Subject: [PATCH 250/338] chore(deps): update all dependencies (#639) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * See https://pandas.pydata.org/docs/dev/whatsnew/v2.1.0.html#backwards-incompatible-api-changes * See https://pandas.pydata.org/docs/dev/whatsnew/v2.1.0.html#backwards-incompatible-api-changes --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 5 +++-- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 70613be0cfe..8e716e0feaa 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.0 +pytest==7.4.1 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 2a4dccc0df2..00742565d64 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ google-cloud-testutils==1.3.3 -pytest==7.4.0 +pytest==7.4.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 70613be0cfe..8e716e0feaa 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.0 +pytest==7.4.1 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 77f21dc7ff8..a42ddda21f7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -5,9 +5,10 @@ pyarrow==12.0.1; python_version == '3.7' pyarrow==14.0.1; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; 
python_version == '3.8' -ipython==8.14.0; python_version >= '3.9' +ipython==8.15.0; python_version >= '3.9' ipywidgets==8.1.0 pandas===1.3.5; python_version == '3.7' -pandas==2.0.3; python_version > '3.7' +pandas===2.0.3; python_version == '3.8' +pandas==2.1.0; python_version >= '3.9' tqdm==4.66.1 db-dtypes==1.1.1 From 0bd79e0daf3c973f39119f67363f3b3d097663f4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 14:35:26 +0100 Subject: [PATCH 251/338] chore(deps): update all dependencies (#723) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 4 ++-- bigquery_storage/snippets/requirements.txt | 4 ++-- .../to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 14 +++++++------- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 8e716e0feaa..f9708e4b7cf 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.1 +pytest==7.4.3 diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 48c0a653c9e..bb7da0d820d 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.22.0 +google-cloud-bigquery-storage==2.24.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 00742565d64..5d2f0c54224 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -google-cloud-testutils==1.3.3 -pytest==7.4.1 +google-cloud-testutils==1.4.0 +pytest==7.4.3 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 2833567bcc4..a9d7fc0523f 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.22.0 -google-cloud-bigquery==3.11.4 +google-cloud-bigquery-storage==2.24.0 +google-cloud-bigquery==3.14.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 8e716e0feaa..f9708e4b7cf 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.1 +pytest==7.4.3 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index a42ddda21f7..ffa769fa3da 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,14 +1,14 @@ -google-auth==2.22.0 -google-cloud-bigquery-storage==2.22.0 -google-cloud-bigquery==3.11.4 +google-auth==2.25.2 +google-cloud-bigquery-storage==2.24.0 +google-cloud-bigquery==3.14.1 pyarrow==12.0.1; python_version == '3.7' pyarrow==14.0.1; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.15.0; python_version >= '3.9' -ipywidgets==8.1.0 +ipython==8.18.1; python_version >= '3.9' +ipywidgets==8.1.1 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' -pandas==2.1.0; python_version >= '3.9' +pandas==2.1.4; python_version >= '3.9' tqdm==4.66.1 
-db-dtypes==1.1.1 +db-dtypes==1.2.0 From 8fdd6e5105095e66055fff8352521ae5757c4d30 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Jan 2024 12:25:53 -0500 Subject: [PATCH 252/338] feat: add ability to request compressed ReadRowsResponse rows (#728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add ability to request compressed ReadRowsResponse rows This change allows the client to request raw lz4 compression of the ReadRowsResponse rows data for both ArrowRecordBatches and Avro rows. PiperOrigin-RevId: 597000088 Source-Link: https://github.com/googleapis/googleapis/commit/341d70f9f3ac6c042309d9bc3c52edc94d95b5fb Source-Link: https://github.com/googleapis/googleapis-gen/commit/01713f3f5534acc78f04d59e13c0668c8129bf03 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE3MTNmM2Y1NTM0YWNjNzhmMDRkNTllMTNjMDY2OGM4MTI5YmYwMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index ab9997594b9..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.24.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 764c0a58458..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.24.0" + "version": "0.1.0" }, "snippets": [ { From c30a69ec4c7869b1e7da4e40c3771e3d65c4e44f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 5 Mar 2024 11:37:27 -0800 Subject: [PATCH 253/338] feat: Add include_recaptcha_script for as a new action in firewall policies (#753) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add include_recaptcha_script for as a new action in firewall policies PiperOrigin-RevId: 612851792 Source-Link: https://github.com/googleapis/googleapis/commit/49ea2c0fc42dd48996b833f05a258ad7e8590d3d Source-Link: https://github.com/googleapis/googleapis-gen/commit/460fdcbbbe00f35b1c591b1f3ef0c77ebd3ce277 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDYwZmRjYmJiZTAwZjM1YjFjNTkxYjFmM2VmMGM3N2ViZDNjZTI3NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- ...age_v1_generated_big_query_read_create_read_session_async.py | 2 +- 
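Note on the compressed-read feature from PATCH 252 above: the capability is exposed through the read session's table read options rather than through any of the regenerated samples in this series. The following is a minimal sketch, assuming the response_compression_codec field and RESPONSE_COMPRESSION_CODEC_LZ4 enum value that the commit's proto change describes; the project id and table path are placeholders.

    from google.cloud import bigquery_storage_v1
    from google.cloud.bigquery_storage_v1 import types

    client = bigquery_storage_v1.BigQueryReadClient()

    requested_session = types.ReadSession(
        table="projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current",
        data_format=types.DataFormat.AVRO,
        read_options=types.ReadSession.TableReadOptions(
            # Assumed field and enum names added by this feature: ask the
            # server to lz4-compress the row payload of each ReadRowsResponse.
            response_compression_codec=(
                types.ReadSession.TableReadOptions.ResponseCompressionCodec.RESPONSE_COMPRESSION_CODEC_LZ4
            ),
        ),
    )

    session = client.create_read_session(
        parent="projects/your-project-id",
        read_session=requested_session,
        max_stream_count=1,
    )

With compression requested, each ReadRowsResponse carries lz4-compressed Avro or Arrow bytes, so a caller consuming the raw stream must decompress them before parsing unless the client library does so transparently.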
...rage_v1_generated_big_query_read_create_read_session_sync.py | 2 +- ...gquerystorage_v1_generated_big_query_read_read_rows_async.py | 2 +- ...igquerystorage_v1_generated_big_query_read_read_rows_sync.py | 2 +- ...orage_v1_generated_big_query_read_split_read_stream_async.py | 2 +- ...torage_v1_generated_big_query_read_split_read_stream_sync.py | 2 +- ...erystorage_v1_generated_big_query_write_append_rows_async.py | 2 +- ...uerystorage_v1_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...ge_v1_generated_big_query_write_create_write_stream_async.py | 2 +- ...age_v1_generated_big_query_write_create_write_stream_sync.py | 2 +- ..._v1_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...e_v1_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...uerystorage_v1_generated_big_query_write_flush_rows_async.py | 2 +- ...querystorage_v1_generated_big_query_write_flush_rows_sync.py | 2 +- ...orage_v1_generated_big_query_write_get_write_stream_async.py | 2 +- ...torage_v1_generated_big_query_write_get_write_stream_sync.py | 2 +- ...1beta2_generated_big_query_read_create_read_session_async.py | 2 +- ...v1beta2_generated_big_query_read_create_read_session_sync.py | 2 +- ...ystorage_v1beta2_generated_big_query_read_read_rows_async.py | 2 +- ...rystorage_v1beta2_generated_big_query_read_read_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_read_split_read_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_read_split_read_stream_sync.py | 2 +- ...orage_v1beta2_generated_big_query_write_append_rows_async.py | 2 +- ...torage_v1beta2_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...beta2_generated_big_query_write_create_write_stream_async.py | 2 +- ...1beta2_generated_big_query_write_create_write_stream_sync.py | 2 +- ...ta2_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...eta2_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...torage_v1beta2_generated_big_query_write_flush_rows_async.py | 2 +- ...storage_v1beta2_generated_big_query_write_flush_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_write_get_write_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_write_get_write_stream_sync.py | 2 +- 36 files changed, 36 insertions(+), 36 deletions(-) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index 6411ff6b7d4..b70da03de6d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index ff2a64bede1..f722665a57a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index fe590e478c0..4057ec36aba 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index 10224fa18fb..c80d0d4aa06 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index 765abc3c1bb..3c961176d68 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index 22d209a4d9d..f4fe475aba0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 915076b6a0b..65ea3ade58d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index f7cd0459ffe..7777db6818d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py index d1113092708..d4529cb9867 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py index af326fa89b4..1458002ba82 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index 28a7ddf17b6..fe8d874d089 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index ed298f92e6c..b731e2f7c94 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index 6799e295f35..ad0549357c3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index c5b9cb129ab..3492253ba9a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index dac63130ff9..ee486bd5e04 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index 9217fc938c1..cf94b598c40 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index dda01971cf6..f1b5f6e854b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index 46a7d9275a7..314bfa49df3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index 377cd61bf40..2cc08be026d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index 212f80b1dc6..70d121f21e3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index 696e2d62416..ede50645b29 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index 1a2b4817080..dd7bf53ed55 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index 5671fdc524e..54fbf14e7c8 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index 687261a4731..6091da370f5 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py index 4668d76bd90..592bacedf6e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py index a25d7fe374d..b9ae5dd7be0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py index bb9175fb831..43b9f24f257 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py index a0aaa866fa3..d8f293bf2e1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index 57c20ad0cfc..7c0ed7e9e66 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index a57b83cb74b..d8deeaf6a7a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index bac5a2f2dcd..28c2f48c0aa 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index 19f459ac66c..8edca5d7b6d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py index 2927e788022..15788671762 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py index ea6ac1c63be..a2a5881231b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index 6069e667f64..6a1102b6040 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index fd330df57c8..04ad30d9584 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 3f6f753f443013a47d87082fb6bd0c6a3e6cbd02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Tue, 19 Mar 2024 13:01:45 -0500 Subject: [PATCH 254/338] chore: sort imports (#761) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: sort imports * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/quickstart/noxfile.py | 1 - bigquery_storage/quickstart/quickstart.py | 3 +-- bigquery_storage/snippets/append_rows_pending.py | 3 +-- bigquery_storage/snippets/append_rows_pending_test.py | 1 - bigquery_storage/snippets/append_rows_proto2.py | 3 +-- bigquery_storage/snippets/append_rows_proto2_test.py | 1 - bigquery_storage/snippets/conftest.py | 1 - bigquery_storage/snippets/noxfile.py | 1 - bigquery_storage/to_dataframe/noxfile.py | 1 - 9 files changed, 3 insertions(+), 12 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index 3b7135946fd..c36d5f2d81f 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py index 6fbbdb3133d..2d065083655 100644 --- a/bigquery_storage/quickstart/quickstart.py +++ b/bigquery_storage/quickstart/quickstart.py @@ -17,8 +17,7 @@ def main(project_id="your-project-id", snapshot_millis=0): # [START bigquerystorage_quickstart] - from google.cloud.bigquery_storage import BigQueryReadClient - from google.cloud.bigquery_storage import types + from google.cloud.bigquery_storage import BigQueryReadClient, types # TODO(developer): Set the project_id variable. 
# project_id = 'your-project-id' diff --git a/bigquery_storage/snippets/append_rows_pending.py b/bigquery_storage/snippets/append_rows_pending.py index 1c391a0961b..13fdab6c0f3 100644 --- a/bigquery_storage/snippets/append_rows_pending.py +++ b/bigquery_storage/snippets/append_rows_pending.py @@ -19,8 +19,7 @@ """ from google.cloud import bigquery_storage_v1 -from google.cloud.bigquery_storage_v1 import types -from google.cloud.bigquery_storage_v1 import writer +from google.cloud.bigquery_storage_v1 import types, writer from google.protobuf import descriptor_pb2 # If you update the customer_record.proto protocol buffer definition, run: diff --git a/bigquery_storage/snippets/append_rows_pending_test.py b/bigquery_storage/snippets/append_rows_pending_test.py index dc0e690d61c..508ce60b5ca 100644 --- a/bigquery_storage/snippets/append_rows_pending_test.py +++ b/bigquery_storage/snippets/append_rows_pending_test.py @@ -20,7 +20,6 @@ from . import append_rows_pending - DIR = pathlib.Path(__file__).parent diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py index a06c29d70de..667f775e63d 100644 --- a/bigquery_storage/snippets/append_rows_proto2.py +++ b/bigquery_storage/snippets/append_rows_proto2.py @@ -21,8 +21,7 @@ import decimal from google.cloud import bigquery_storage_v1 -from google.cloud.bigquery_storage_v1 import types -from google.cloud.bigquery_storage_v1 import writer +from google.cloud.bigquery_storage_v1 import types, writer from google.protobuf import descriptor_pb2 # If you make updates to the sample_data.proto protocol buffers definition, diff --git a/bigquery_storage/snippets/append_rows_proto2_test.py b/bigquery_storage/snippets/append_rows_proto2_test.py index d35e9a78912..f88c9248ccd 100644 --- a/bigquery_storage/snippets/append_rows_proto2_test.py +++ b/bigquery_storage/snippets/append_rows_proto2_test.py @@ -22,7 +22,6 @@ from . import append_rows_proto2 - DIR = pathlib.Path(__file__).parent diff --git a/bigquery_storage/snippets/conftest.py b/bigquery_storage/snippets/conftest.py index 531f0b9dc7e..b5a9c444456 100644 --- a/bigquery_storage/snippets/conftest.py +++ b/bigquery_storage/snippets/conftest.py @@ -16,7 +16,6 @@ import pytest import test_utils.prefixer - prefixer = test_utils.prefixer.Prefixer("python-bigquery-storage", "samples/snippets") diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index 3b7135946fd..c36d5f2d81f 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index 3b7135946fd..c36d5f2d81f 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! 
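For context on the sorted-import change in the patch above: the quickstart now pulls BigQueryReadClient and types in one combined, alphabetized import line instead of two separate ones. A minimal sketch of that import style in use follows; it is not part of the patch, it assumes Application Default Credentials are configured, and "your-project-id" is a placeholder for the project that should own the read session (only the public table name is real):

from google.cloud.bigquery_storage import BigQueryReadClient, types

client = BigQueryReadClient()

# The public baby-names table used throughout these samples.
table = "projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current"

requested_session = types.ReadSession(
    table=table,
    data_format=types.DataFormat.AVRO,
)
session = client.create_read_session(
    parent="projects/your-project-id",  # placeholder project ID
    read_session=requested_session,
    max_stream_count=1,
)
print("Created session:", session.name)
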
From 3b023abd9a6f12b6334852551d4ec418bd067a9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Thu, 21 Mar 2024 10:34:19 -0500 Subject: [PATCH 255/338] chore: exclude sample-specific formatting from owlbot (#763) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: exclude sample-specific formatting from owlbot * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/snippets/append_rows_pending.py | 3 ++- bigquery_storage/snippets/append_rows_pending_test.py | 3 ++- bigquery_storage/snippets/append_rows_proto2.py | 3 ++- bigquery_storage/snippets/append_rows_proto2_test.py | 3 ++- bigquery_storage/snippets/conftest.py | 3 ++- bigquery_storage/to_dataframe/read_table_bqstorage.py | 3 ++- 6 files changed, 12 insertions(+), 6 deletions(-) diff --git a/bigquery_storage/snippets/append_rows_pending.py b/bigquery_storage/snippets/append_rows_pending.py index 13fdab6c0f3..ac47828dcdd 100644 --- a/bigquery_storage/snippets/append_rows_pending.py +++ b/bigquery_storage/snippets/append_rows_pending.py @@ -18,9 +18,10 @@ using the low-level generated client for Python. """ +from google.protobuf import descriptor_pb2 + from google.cloud import bigquery_storage_v1 from google.cloud.bigquery_storage_v1 import types, writer -from google.protobuf import descriptor_pb2 # If you update the customer_record.proto protocol buffer definition, run: # diff --git a/bigquery_storage/snippets/append_rows_pending_test.py b/bigquery_storage/snippets/append_rows_pending_test.py index 508ce60b5ca..9af6957071f 100644 --- a/bigquery_storage/snippets/append_rows_pending_test.py +++ b/bigquery_storage/snippets/append_rows_pending_test.py @@ -15,9 +15,10 @@ import pathlib import random -from google.cloud import bigquery import pytest +from google.cloud import bigquery + from . import append_rows_pending DIR = pathlib.Path(__file__).parent diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py index 667f775e63d..73ad221ab46 100644 --- a/bigquery_storage/snippets/append_rows_proto2.py +++ b/bigquery_storage/snippets/append_rows_proto2.py @@ -20,9 +20,10 @@ import datetime import decimal +from google.protobuf import descriptor_pb2 + from google.cloud import bigquery_storage_v1 from google.cloud.bigquery_storage_v1 import types, writer -from google.protobuf import descriptor_pb2 # If you make updates to the sample_data.proto protocol buffers definition, # run: diff --git a/bigquery_storage/snippets/append_rows_proto2_test.py b/bigquery_storage/snippets/append_rows_proto2_test.py index f88c9248ccd..904f17ddab9 100644 --- a/bigquery_storage/snippets/append_rows_proto2_test.py +++ b/bigquery_storage/snippets/append_rows_proto2_test.py @@ -17,9 +17,10 @@ import pathlib import random -from google.cloud import bigquery import pytest +from google.cloud import bigquery + from . import append_rows_proto2 DIR = pathlib.Path(__file__).parent diff --git a/bigquery_storage/snippets/conftest.py b/bigquery_storage/snippets/conftest.py index b5a9c444456..a186291ef07 100644 --- a/bigquery_storage/snippets/conftest.py +++ b/bigquery_storage/snippets/conftest.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from google.cloud import bigquery import pytest import test_utils.prefixer +from google.cloud import bigquery + prefixer = test_utils.prefixer.Prefixer("python-bigquery-storage", "samples/snippets") diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage.py b/bigquery_storage/to_dataframe/read_table_bqstorage.py index a0b1c49bc2e..31b21618b49 100644 --- a/bigquery_storage/to_dataframe/read_table_bqstorage.py +++ b/bigquery_storage/to_dataframe/read_table_bqstorage.py @@ -21,9 +21,10 @@ def read_table(your_project_id): your_project_id = original_your_project_id # [START bigquerystorage_pandas_tutorial_read_session] + import pandas + from google.cloud import bigquery_storage from google.cloud.bigquery_storage import types - import pandas bqstorageclient = bigquery_storage.BigQueryReadClient() From c83761108c775e049d8dba11102a2f96330612c0 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 5 Apr 2024 21:25:03 +0200 Subject: [PATCH 256/338] chore(deps): update all dependencies (#725) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * pin ipython version for python 3.9 because later versions dropped support for python 3.9 * pin pytest for python 3.7 * pin pytest for python 3.7 * pin pytest for python 3.7 --------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements-test.txt | 3 ++- bigquery_storage/snippets/requirements-test.txt | 3 ++- bigquery_storage/snippets/requirements.txt | 2 +- .../to_dataframe/requirements-test.txt | 3 ++- bigquery_storage/to_dataframe/requirements.txt | 15 ++++++++------- 5 files changed, 15 insertions(+), 11 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index f9708e4b7cf..54cbac52bf6 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1 +1,2 @@ -pytest==7.4.3 +pytest==7.4.3; python_version == '3.7' +pytest==8.1.1; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 5d2f0c54224..2bbf79ffb5d 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,2 +1,3 @@ google-cloud-testutils==1.4.0 -pytest==7.4.3 +pytest==7.4.3; python_version == '3.7' +pytest==8.1.1; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index a9d7fc0523f..2de1e429b4d 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.24.0 -google-cloud-bigquery==3.14.1 +google-cloud-bigquery==3.20.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index f9708e4b7cf..54cbac52bf6 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1 +1,2 @@ -pytest==7.4.3 +pytest==7.4.3; python_version == '3.7' +pytest==8.1.1; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ffa769fa3da..ec19657d6c3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ 
b/bigquery_storage/to_dataframe/requirements.txt @@ -1,14 +1,15 @@ -google-auth==2.25.2 +google-auth==2.29.0 google-cloud-bigquery-storage==2.24.0 -google-cloud-bigquery==3.14.1 +google-cloud-bigquery==3.20.1 pyarrow==12.0.1; python_version == '3.7' -pyarrow==14.0.1; python_version >= '3.8' +pyarrow==15.0.2; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.18.1; python_version >= '3.9' -ipywidgets==8.1.1 +ipython==8.18.1; python_version == '3.9' +ipython==8.23.0; python_version >= '3.10' +ipywidgets==8.1.2 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' -pandas==2.1.4; python_version >= '3.9' -tqdm==4.66.1 +pandas==2.2.1; python_version >= '3.9' +tqdm==4.66.2 db-dtypes==1.2.0 From 9b1a62994d468d1f0f2bab049fcfb19b8402d748 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Thu, 11 Apr 2024 07:40:24 -0700 Subject: [PATCH 257/338] fix: use triple equal for pinned requirements (#769) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 54cbac52bf6..d0b91137cdc 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.4.3; python_version == '3.7' +pytest===7.4.3; python_version == '3.7' pytest==8.1.1; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 2bbf79ffb5d..f77950bb2b7 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 -pytest==7.4.3; python_version == '3.7' +pytest===7.4.3; python_version == '3.7' pytest==8.1.1; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 54cbac52bf6..d0b91137cdc 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.4.3; python_version == '3.7' +pytest===7.4.3; python_version == '3.7' pytest==8.1.1; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ec19657d6c3..76f2c999112 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,11 +1,11 @@ google-auth==2.29.0 google-cloud-bigquery-storage==2.24.0 google-cloud-bigquery==3.20.1 -pyarrow==12.0.1; python_version == '3.7' +pyarrow===12.0.1; python_version == '3.7' pyarrow==15.0.2; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' -ipython==8.18.1; python_version == '3.9' +ipython===8.18.1; python_version == '3.9' ipython==8.23.0; python_version >= '3.10' ipywidgets==8.1.2 pandas===1.3.5; python_version == '3.7' From 210348e1eb322ede04cd5347ff8370d92d1244b0 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 27 Apr 2024 01:15:29 +0200 Subject: [PATCH 258/338] chore(deps): update all dependencies (#766) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 
chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 76f2c999112..47eb7d2ee16 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -10,6 +10,6 @@ ipython==8.23.0; python_version >= '3.10' ipywidgets==8.1.2 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' -pandas==2.2.1; python_version >= '3.9' +pandas==2.2.2; python_version >= '3.9' tqdm==4.66.2 db-dtypes==1.2.0 From d5b0646d9774d09665391f55adcde7f399c07820 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Tue, 30 Apr 2024 14:28:49 -0700 Subject: [PATCH 259/338] feat: add stream write samples for range (#780) * feat: add stream write samples for range * lint --- .../snippets/append_rows_proto2.py | 8 ++++ bigquery_storage/snippets/sample_data.proto | 11 +++++- bigquery_storage/snippets/sample_data_pb2.py | 39 +++++-------------- .../snippets/sample_data_schema.json | 5 +++ 4 files changed, 32 insertions(+), 31 deletions(-) diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py index 73ad221ab46..7f3d88a7a3c 100644 --- a/bigquery_storage/snippets/append_rows_proto2.py +++ b/bigquery_storage/snippets/append_rows_proto2.py @@ -214,6 +214,14 @@ def append_rows_proto2(project_id: str, dataset_id: str, table_id: str): row.struct_list.append(sub_message) proto_rows.serialized_rows.append(row.SerializeToString()) + row = sample_data_pb2.SampleData() + row.row_num = 16 + date_value = datetime.date(2021, 8, 8) + epoch_value = datetime.date(1970, 1, 1) + delta = date_value - epoch_value + row.range_date.start = delta.days + proto_rows.serialized_rows.append(row.SerializeToString()) + request = types.AppendRowsRequest() request.offset = 12 proto_data = types.AppendRowsRequest.ProtoData() diff --git a/bigquery_storage/snippets/sample_data.proto b/bigquery_storage/snippets/sample_data.proto index 3e9f19cefff..6f0bb93a65c 100644 --- a/bigquery_storage/snippets/sample_data.proto +++ b/bigquery_storage/snippets/sample_data.proto @@ -29,6 +29,11 @@ message SampleData { optional int64 sub_int_col = 1; } + message RangeValue { + optional int32 start = 1; + optional int32 end = 2; + } + // The following types map directly between protocol buffers and their // corresponding BigQuery data types. optional bool bool_col = 1; @@ -55,7 +60,11 @@ message SampleData { optional SampleStruct struct_col = 14; repeated SampleStruct struct_list = 15; + // Range types, see: + // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#range_type + optional RangeValue range_date = 16; + // Use the required keyword for client-side validation of required fields. - required int64 row_num = 16; + required int64 row_num = 17; } // [END bigquerystorage_append_rows_raw_proto2_definition] diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py index 4e73f1dd48d..19707cf0be0 100644 --- a/bigquery_storage/snippets/sample_data_pb2.py +++ b/bigquery_storage/snippets/sample_data_pb2.py @@ -2,10 +2,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sample_data.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -14,37 +13,17 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x11sample_data.proto"\xa9\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12\x0f\n\x07row_num\x18\x10 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03' + b'\n\x11sample_data.proto"\xff\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12*\n\nrange_date\x18\x10 \x01(\x0b\x32\x16.SampleData.RangeValue\x12\x0f\n\x07row_num\x18\x11 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03\x1a(\n\nRangeValue\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05' ) - -_SAMPLEDATA = DESCRIPTOR.message_types_by_name["SampleData"] -_SAMPLEDATA_SAMPLESTRUCT = _SAMPLEDATA.nested_types_by_name["SampleStruct"] -SampleData = _reflection.GeneratedProtocolMessageType( - "SampleData", - (_message.Message,), - { - "SampleStruct": _reflection.GeneratedProtocolMessageType( - "SampleStruct", - (_message.Message,), - { - "DESCRIPTOR": _SAMPLEDATA_SAMPLESTRUCT, - "__module__": "sample_data_pb2" - # @@protoc_insertion_point(class_scope:SampleData.SampleStruct) - }, - ), - "DESCRIPTOR": _SAMPLEDATA, - "__module__": "sample_data_pb2" - # @@protoc_insertion_point(class_scope:SampleData) - }, -) -_sym_db.RegisterMessage(SampleData) -_sym_db.RegisterMessage(SampleData.SampleStruct) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "sample_data_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _SAMPLEDATA._serialized_start = 22 - _SAMPLEDATA._serialized_end = 447 - _SAMPLEDATA_SAMPLESTRUCT._serialized_start = 412 - _SAMPLEDATA_SAMPLESTRUCT._serialized_end = 447 + _SAMPLEDATA._serialized_end = 533 + _SAMPLEDATA_SAMPLESTRUCT._serialized_start = 456 
+ _SAMPLEDATA_SAMPLESTRUCT._serialized_end = 491 + _SAMPLEDATA_RANGEVALUE._serialized_start = 493 + _SAMPLEDATA_RANGEVALUE._serialized_end = 533 # @@protoc_insertion_point(module_scope) diff --git a/bigquery_storage/snippets/sample_data_schema.json b/bigquery_storage/snippets/sample_data_schema.json index ba6ba102261..40efb7122b5 100644 --- a/bigquery_storage/snippets/sample_data_schema.json +++ b/bigquery_storage/snippets/sample_data_schema.json @@ -72,5 +72,10 @@ {"name": "sub_int_col", "type": "INTEGER"} ], "mode": "REPEATED" + }, + { + "name": "range_date", + "type": "RANGE", + "rangeElementType": {"type": "DATE"} } ] From cbea008be486feb8e4a953560c66271399282531 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 May 2024 19:10:17 +0200 Subject: [PATCH 260/338] chore(deps): update all dependencies (#778) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/snippets/sample_data_pb2.py | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index d0b91137cdc..0dd7b78b098 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.1.1; python_version >= '3.8' +pytest==8.1.2; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index f77950bb2b7..f5652da845f 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.1.1; python_version >= '3.8' +pytest==8.1.2; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 2de1e429b4d..b34ef0b6021 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.24.0 -google-cloud-bigquery==3.20.1 +google-cloud-bigquery==3.21.0 diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py index 19707cf0be0..0f261b34343 100644 --- a/bigquery_storage/snippets/sample_data_pb2.py +++ b/bigquery_storage/snippets/sample_data_pb2.py @@ -2,10 +2,10 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sample_data.proto """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder # @@protoc_insertion_point(imports) diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index d0b91137cdc..0dd7b78b098 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.1.1; python_version >= '3.8' +pytest==8.1.2; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 47eb7d2ee16..828123dbb1e 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,12 +1,12 @@ google-auth==2.29.0 google-cloud-bigquery-storage==2.24.0 -google-cloud-bigquery==3.20.1 +google-cloud-bigquery==3.21.0 pyarrow===12.0.1; python_version == '3.7' -pyarrow==15.0.2; python_version >= '3.8' +pyarrow==16.0.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.23.0; python_version >= '3.10' +ipython==8.24.0; python_version >= '3.10' ipywidgets==8.1.2 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From 53e166cb1fbaf2f8c496a62bc9ed00fa4789d77e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 3 May 2024 09:11:42 -0700 Subject: [PATCH 261/338] chore(main): release 2.25.0 (#731) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..e74497ef31a 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.25.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..2f46ad87684 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.25.0" }, "snippets": [ { From 059c8c9f2f60c1c2e24c3a9be9b597ce3f64f047 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 6 May 2024 14:29:18 +0200 Subject: [PATCH 262/338] chore(deps): update all dependencies (#781) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Chalmer Lowe --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 0dd7b78b098..5804eb10c29 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.1.2; python_version >= '3.8' +pytest==8.2.0; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index f5652da845f..001770f3075 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.1.2; python_version >= '3.8' +pytest==8.2.0; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 0dd7b78b098..5804eb10c29 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.1.2; python_version >= '3.8' +pytest==8.2.0; python_version >= '3.8' From fa040de9557555ada5b98eb59aa80361b4096c45 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 7 May 2024 01:58:18 +0200 Subject: [PATCH 263/338] chore(deps): update dependency tqdm to v4.66.3 [security] (#783) Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 828123dbb1e..e4cf1059354 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -11,5 +11,5 @@ ipywidgets==8.1.2 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.2; python_version >= '3.9' -tqdm==4.66.2 +tqdm==4.66.3 db-dtypes==1.2.0 From 141615628c6469c0edb101f97c4a732839f459d7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 17 May 2024 00:42:41 +0200 Subject: [PATCH 264/338] chore(deps): update all dependencies (#784) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index bb7da0d820d..5ca374593df 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 
@@ fastavro -google-cloud-bigquery-storage==2.24.0 +google-cloud-bigquery-storage==2.25.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index b34ef0b6021..b6b286a152e 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.24.0 +google-cloud-bigquery-storage==2.25.0 google-cloud-bigquery==3.21.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index e4cf1059354..8138fd44c15 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.29.0 -google-cloud-bigquery-storage==2.24.0 +google-cloud-bigquery-storage==2.25.0 google-cloud-bigquery==3.21.0 pyarrow===12.0.1; python_version == '3.7' pyarrow==16.0.0; python_version >= '3.8' From 20e4b99dc4064969ac78fda8ea9f015c3829cc9d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 23 May 2024 21:49:59 +0200 Subject: [PATCH 265/338] chore(deps): update all dependencies (#787) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index b6b286a152e..7aa134af6ca 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.25.0 -google-cloud-bigquery==3.21.0 +google-cloud-bigquery==3.23.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 8138fd44c15..663ea7d98ca 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,8 +1,8 @@ google-auth==2.29.0 google-cloud-bigquery-storage==2.25.0 -google-cloud-bigquery==3.21.0 +google-cloud-bigquery==3.23.0 pyarrow===12.0.1; python_version == '3.7' -pyarrow==16.0.0; python_version >= '3.8' +pyarrow==16.1.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' @@ -11,5 +11,5 @@ ipywidgets==8.1.2 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.2; python_version >= '3.9' -tqdm==4.66.3 +tqdm==4.66.4 db-dtypes==1.2.0 From 28fe731f2995808d2c06689de80b38831db9cc80 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 29 May 2024 22:34:07 +0200 Subject: [PATCH 266/338] chore(deps): update all dependencies (#789) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 5 
files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 5804eb10c29..74f5488134b 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.2.0; python_version >= '3.8' +pytest==8.2.1; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 001770f3075..227031b0141 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.2.0; python_version >= '3.8' +pytest==8.2.1; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 7aa134af6ca..1ae135c1d61 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.25.0 -google-cloud-bigquery==3.23.0 +google-cloud-bigquery==3.23.1 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 5804eb10c29..74f5488134b 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.2.0; python_version >= '3.8' +pytest==8.2.1; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 663ea7d98ca..34ad7f2ba58 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,6 @@ google-auth==2.29.0 google-cloud-bigquery-storage==2.25.0 -google-cloud-bigquery==3.23.0 +google-cloud-bigquery==3.23.1 pyarrow===12.0.1; python_version == '3.7' pyarrow==16.1.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' From 5f98494ec915e6e3221546857f7e6dd57c4932b8 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 31 May 2024 18:54:37 +0200 Subject: [PATCH 267/338] chore(deps): update all dependencies (#790) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 34ad7f2ba58..67745e55e7d 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -7,7 +7,7 @@ ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' ipython==8.24.0; python_version >= '3.10' -ipywidgets==8.1.2 +ipywidgets==8.1.3 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.2; python_version >= '3.9' From c612114c303e75ea25c522ffba63d7088e31833a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 3 Jun 2024 19:24:56 +0200 Subject: [PATCH 268/338] chore(deps): update all dependencies (#791) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 67745e55e7d..4e8edebf352 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -6,7 +6,7 @@ pyarrow==16.1.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.24.0; python_version >= '3.10' +ipython==8.25.0; python_version >= '3.10' ipywidgets==8.1.3 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From b72202b90c3a2c23b217cb9130869f1b86ba6413 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 18 Jul 2024 00:02:51 +0200 Subject: [PATCH 269/338] chore(deps): update all dependencies (#801) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Lingqing Gan Co-authored-by: Owl Bot --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 8 ++++---- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 74f5488134b..4c9ee747950 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.2.1; python_version >= '3.8' +pytest==8.2.2; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 227031b0141..54496799310 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.2.1; python_version >= '3.8' +pytest==8.2.2; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 1ae135c1d61..88f8dd3e2c4 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.25.0 -google-cloud-bigquery==3.23.1 +google-cloud-bigquery==3.25.0 diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 74f5488134b..4c9ee747950 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.2.1; python_version >= '3.8' +pytest==8.2.2; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt 
index 4e8edebf352..f36bff6fac7 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,12 +1,12 @@ -google-auth==2.29.0 +google-auth==2.32.0 google-cloud-bigquery-storage==2.25.0 -google-cloud-bigquery==3.23.1 +google-cloud-bigquery==3.25.0 pyarrow===12.0.1; python_version == '3.7' -pyarrow==16.1.0; python_version >= '3.8' +pyarrow==17.0.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.25.0; python_version >= '3.10' +ipython==8.26.0; python_version >= '3.10' ipywidgets==8.1.3 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From 8dd4cf22c4a95ddd70e23ac9158a9c89ca8c2fa3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 23 Jul 2024 01:33:29 +0200 Subject: [PATCH 270/338] chore(deps): update dependency pytest to v8.3.1 (#803) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 4c9ee747950..418361bd8c9 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.2.2; python_version >= '3.8' +pytest==8.3.1; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 54496799310..f5060b3cbda 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.2.2; python_version >= '3.8' +pytest==8.3.1; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 4c9ee747950..418361bd8c9 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.2.2; python_version >= '3.8' +pytest==8.3.1; python_version >= '3.8' From e4cb5045388061cf534999e8500f1efbbc57f756 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 29 Jul 2024 19:06:59 +0200 Subject: [PATCH 271/338] chore(deps): update dependency pytest to v8.3.2 (#805) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 418361bd8c9..16e6c409623 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.1; python_version >= '3.8' +pytest==8.3.2; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index f5060b3cbda..c02f8349d80 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 
pytest===7.4.3; python_version == '3.7' -pytest==8.3.1; python_version >= '3.8' +pytest==8.3.2; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 418361bd8c9..16e6c409623 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.1; python_version >= '3.8' +pytest==8.3.2; python_version >= '3.8' From bae1244c7fd8d435b213a6d749ac6a6e6d294793 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 Aug 2024 16:29:11 +0200 Subject: [PATCH 272/338] chore(deps): update dependency tqdm to v4.66.5 (#807) --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index f36bff6fac7..6df28865556 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -11,5 +11,5 @@ ipywidgets==8.1.3 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.2; python_version >= '3.9' -tqdm==4.66.4 +tqdm==4.66.5 db-dtypes==1.2.0 From 22177d40c7f0cd01f0c0d162b8d45e8d9693d28c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 Aug 2024 17:44:10 +0200 Subject: [PATCH 273/338] chore(deps): update all dependencies (#809) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6df28865556..5029c81fa58 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.32.0 +google-auth==2.33.0 google-cloud-bigquery-storage==2.25.0 google-cloud-bigquery==3.25.0 pyarrow===12.0.1; python_version == '3.7' @@ -12,4 +12,4 @@ pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.2; python_version >= '3.9' tqdm==4.66.5 -db-dtypes==1.2.0 +db-dtypes==1.3.0 From d774939bdd743f6ea35bd34c65c2f3b3df018b94 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 16 Aug 2024 09:29:35 -0700 Subject: [PATCH 274/338] feat: Add BigQuery Metastore Partition Service API version v1alpha (#779) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add BigQuery Metastore Partition Service API version v1alpha PiperOrigin-RevId: 662212485 Source-Link: https://github.com/googleapis/googleapis/commit/456a812fbc03ef50e253dc85f2b2c22a8af96d36 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ec266e6da03208a76b0fd6001ba7df93dae44e6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmVjMjY2ZTZkYTAzMjA4YTc2YjBmZDYwMDFiYTdkZjkzZGFlNDRlNiJ9 chore: Update gapic-generator-python to v1.18.5 PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 656040068 Source-Link: https://github.com/googleapis/googleapis/commit/3f4e29a88f2e1f412439e61c48c88f81dec0bbbf Source-Link: https://github.com/googleapis/googleapis-gen/commit/b8feb2109dde7b0938c22c993d002251ac6714dc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjhmZWIyMTA5ZGRlN2IwOTM4YzIyYzk5M2QwMDIyNTFhYzY3MTRkYyJ9 chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 655567917 Source-Link: https://github.com/googleapis/googleapis/commit/43aa65e3897557c11d947f3133ddb76e5c4b2a6c Source-Link: https://github.com/googleapis/googleapis-gen/commit/0e38378753074c0f66ff63348d6864929e104d5c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGUzODM3ODc1MzA3NGMwZjY2ZmY2MzM0OGQ2ODY0OTI5ZTEwNGQ1YyJ9 feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 chore: Update gapic-generator-python to v1.18.2 PiperOrigin-RevId: 649219344 Source-Link: https://github.com/googleapis/googleapis/commit/7fe35b08099b6e9ced59591919af24faec7de20a Source-Link: https://github.com/googleapis/googleapis-gen/commit/7a6ae6441d1cb0610cf8e9832b9383a4fd52423f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2E2YWU2NDQxZDFjYjA2MTBjZjhlOTgzMmI5MzgzYTRmZDUyNDIzZiJ9 chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 chore: Update gapic-generator-python to v1.17.1 PiperOrigin-RevId: 629071173 Source-Link: https://github.com/googleapis/googleapis/commit/4afa392105cc62e965631d15b772ff68454ecf1c Source-Link: https://github.com/googleapis/googleapis-gen/commit/16dbbb4d0457db5e61ac9f99b0d52a46154455ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9 * update replacements in owlbot.py * remove v1alpha * update owlbot config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix failing unit test by mocking the correct class's method --------- Co-authored-by: Owl Bot 
Co-authored-by: Anthonios Partheniou Co-authored-by: Lingqing Gan --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index e74497ef31a..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.25.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 2f46ad87684..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.25.0" + "version": "0.1.0" }, "snippets": [ { From 11a5ffc70c99baec2e4fb5a9e71e372e45a6adce Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 22 Aug 2024 20:43:31 +0200 Subject: [PATCH 275/338] chore(deps): update all dependencies (#813) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5029c81fa58..459560cc3cb 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.33.0 +google-auth==2.34.0 google-cloud-bigquery-storage==2.25.0 google-cloud-bigquery==3.25.0 pyarrow===12.0.1; python_version == '3.7' From ad59836a05df3afe88b2bcb05f00ef1c869d94c5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 23 Aug 2024 23:22:33 +0200 Subject: [PATCH 276/338] chore(deps): update all dependencies (#816) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 459560cc3cb..999765a3bb9 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -7,7 +7,7 @@ ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' ipython==8.26.0; python_version >= '3.10' -ipywidgets==8.1.3 +ipywidgets==8.1.5 pandas===1.3.5; 
python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.2; python_version >= '3.9' From 409ead891a0cf045a391500c5d75d20cf9b067b7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:43:06 -0700 Subject: [PATCH 277/338] feat: Add BigQuery Metastore Partition Service API version v1alpha (#817) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): Use `google.cloud.bigquery_storage` instead of `google.cloud.bigquery.storage` for v1alpha PiperOrigin-RevId: 666369744 Source-Link: https://github.com/googleapis/googleapis/commit/63947bfd92427cb607a2e335bd8fec23fe2cd529 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f31ef2a6afdbff06593f6e6fec26430953f843ba Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjMxZWYyYTZhZmRiZmYwNjU5M2Y2ZTZmZWMyNjQzMDk1M2Y4NDNiYSJ9 * feat: add documentation for partition value limit BREAKING CHANGE: make the client library gRPC only PiperOrigin-RevId: 666551276 Source-Link: https://github.com/googleapis/googleapis/commit/6f3c628e7fc39b5ca7186aba1a67ae39454d0752 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9c599698f90ae56a61e38a266bd7705614c5dff6 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWM1OTk2OThmOTBhZTU2YTYxZTM4YTI2NmJkNzcwNTYxNGM1ZGZmNiJ9 * build: Only generate gRPC clients for google/cloud/bigquery/storage/v1alpha PiperOrigin-RevId: 667933064 Source-Link: https://github.com/googleapis/googleapis/commit/3165e16f3b29a2a730264c7b5056aa6a48bdf0e7 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bcf1bccd00d8d2315c945bba1d93fcfab1999779 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmNmMWJjY2QwMGQ4ZDIzMTVjOTQ1YmJhMWQ5M2ZjZmFiMTk5OTc3OSJ9 * exclude replacements for v1alpha * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update docs/index.rst; add gapic_version.py * docs: A comment for message `StreamMetastorePartitionsRequest` is changed docs: A comment for message `StreamMetastorePartitionsResponse` is changed docs: A comment for field `location_uri` in message `.google.cloud.bigquery.storage.v1alpha.StorageDescriptor` is changed PiperOrigin-RevId: 670602530 Source-Link: https://github.com/googleapis/googleapis/commit/9c6ceea874182fbfda3e59faba5df35906567c5b Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ab1ed59ce43ba96e71cb589704339f3b8c6e524 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWFiMWVkNTljZTQzYmE5NmU3MWNiNTg5NzA0MzM5ZjNiOGM2ZTUyNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- ...batch_create_metastore_partitions_async.py | 58 ++ ..._batch_create_metastore_partitions_sync.py | 58 ++ ...batch_delete_metastore_partitions_async.py | 54 ++ ..._batch_delete_metastore_partitions_sync.py | 54 ++ ...batch_update_metastore_partitions_async.py | 57 ++ ..._batch_update_metastore_partitions_sync.py | 57 ++ ...service_list_metastore_partitions_async.py | 53 ++ ..._service_list_metastore_partitions_sync.py | 53 ++ ...rvice_stream_metastore_partitions_async.py | 64 ++ ...ervice_stream_metastore_partitions_sync.py | 64 ++ 
...google.cloud.bigquery.storage.v1alpha.json | 782 ++++++++++++++++++ 11 files changed, 1354 insertions(+) create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py new file mode 100644 index 00000000000..2840025ed36 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +async def sample_batch_create_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1alpha.CreateMetastorePartitionRequest() + requests.parent = "parent_value" + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1alpha.BatchCreateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.batch_create_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py new file mode 100644 index 00000000000..fabe4a00fb0 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +def sample_batch_create_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1alpha.CreateMetastorePartitionRequest() + requests.parent = "parent_value" + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1alpha.BatchCreateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py new file mode 100644 index 00000000000..231e2997528 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +async def sample_batch_delete_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + partition_values = bigquery_storage_v1alpha.MetastorePartitionValues() + partition_values.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1alpha.BatchDeleteMetastorePartitionsRequest( + parent="parent_value", + partition_values=partition_values, + ) + + # Make the request + await client.batch_delete_metastore_partitions(request=request) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py new file mode 100644 index 00000000000..74242022f8a --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +def sample_batch_delete_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() + + # Initialize request argument(s) + partition_values = bigquery_storage_v1alpha.MetastorePartitionValues() + partition_values.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1alpha.BatchDeleteMetastorePartitionsRequest( + parent="parent_value", + partition_values=partition_values, + ) + + # Make the request + client.batch_delete_metastore_partitions(request=request) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py new file mode 100644 index 00000000000..c1608f7784e --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +async def sample_batch_update_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1alpha.UpdateMetastorePartitionRequest() + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1alpha.BatchUpdateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.batch_update_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py new file mode 100644 index 00000000000..8f2f2005b57 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +def sample_batch_update_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1alpha.UpdateMetastorePartitionRequest() + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1alpha.BatchUpdateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py new file mode 100644 index 00000000000..344a5253cb7 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +async def sample_list_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1alpha.ListMetastorePartitionsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py new file mode 100644 index 00000000000..09cb3122a36 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +def sample_list_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() + + # Initialize request argument(s) + request = bigquery_storage_v1alpha.ListMetastorePartitionsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py new file mode 100644 index 00000000000..310b0c6c483 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +async def sample_stream_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1alpha.StreamMetastorePartitionsRequest( + parent="parent_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1alpha.StreamMetastorePartitionsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
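+    # Note: any iterable of StreamMetastorePartitionsRequest objects works
+    # here; a real workload could, for example, yield additional requests
+    # as more partition data becomes available.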
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.stream_metastore_partitions(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py new file mode 100644 index 00000000000..4e4b85f3ca3 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1alpha + + +def sample_stream_metastore_partitions(): + # Create a client + client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() + + # Initialize request argument(s) + request = bigquery_storage_v1alpha.StreamMetastorePartitionsRequest( + parent="parent_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1alpha.StreamMetastorePartitionsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
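+    # As in the async sample, additional StreamMetastorePartitionsRequest
+    # objects may be yielded here; the client consumes requests from this
+    # iterator as it sends them over the stream.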
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.stream_metastore_partitions(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json new file mode 100644 index 00000000000..3c29286ee87 --- /dev/null +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -0,0 +1,782 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-storage", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.batch_create_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchCreateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsResponse", + "shortName": "batch_create_metastore_partitions" + }, + "description": "Sample for BatchCreateMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.batch_create_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions", + "service": { + "fullName": 
"google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchCreateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsResponse", + "shortName": "batch_create_metastore_partitions" + }, + "description": "Sample for BatchCreateMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.batch_delete_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchDeleteMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.BatchDeleteMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "batch_delete_metastore_partitions" + }, + "description": "Sample for BatchDeleteMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.batch_delete_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchDeleteMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.BatchDeleteMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "batch_delete_metastore_partitions" + }, + "description": "Sample for BatchDeleteMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.batch_update_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchUpdateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsResponse", + "shortName": "batch_update_metastore_partitions" + }, + "description": "Sample for BatchUpdateMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.batch_update_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchUpdateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsResponse", + "shortName": "batch_update_metastore_partitions" + }, + "description": "Sample for BatchUpdateMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.list_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "ListMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsResponse", + "shortName": "list_metastore_partitions" + }, + "description": "Sample for ListMetastorePartitions", + "file": 
"bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.list_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "ListMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsResponse", + "shortName": "list_metastore_partitions" + }, + "description": "Sample for ListMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.stream_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "StreamMetastorePartitions" + }, + "parameters": [ + { + "name": "requests", + "type": 
"Iterator[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsResponse]", + "shortName": "stream_metastore_partitions" + }, + "description": "Sample for StreamMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.stream_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "StreamMetastorePartitions" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsResponse]", + "shortName": "stream_metastore_partitions" + }, + "description": "Sample for StreamMetastorePartitions", + "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py" + } + ] +} From 9cf73bc4d2be0f90e29c0e49055008e926c1bff5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 13:38:20 -0700 Subject: [PATCH 278/338] chore(main): release 2.26.0 (#811) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..1900614712c 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.26.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..180153550d6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.26.0" }, "snippets": [ { From b233a542c99752d9f4b5ffd67cb7f55c90ce336a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Sep 2024 20:30:07 +0200 Subject: [PATCH 279/338] chore(deps): update all dependencies (#823) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 5ca374593df..29af5705676 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.25.0 +google-cloud-bigquery-storage==2.26.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 88f8dd3e2c4..7c2a117918e 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.25.0 +google-cloud-bigquery-storage==2.26.0 google-cloud-bigquery==3.25.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 999765a3bb9..f3134e8b829 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.34.0 -google-cloud-bigquery-storage==2.25.0 +google-cloud-bigquery-storage==2.26.0 google-cloud-bigquery==3.25.0 pyarrow===12.0.1; python_version == '3.7' pyarrow==17.0.0; python_version >= '3.8' From 12cce4cfcf92693961820f346b46c8bc4a3d3104 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Sep 2024 21:10:00 +0200 Subject: [PATCH 280/338] chore(deps): update all dependencies (#825) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 16e6c409623..df9161beca8 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.2; python_version >= '3.8' +pytest==8.3.3; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index c02f8349d80..8982387d75a 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.3.2; python_version >= '3.8' +pytest==8.3.3; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 16e6c409623..df9161beca8 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.2; python_version >= '3.8' +pytest==8.3.3; python_version >= '3.8' From 2a2406123e44362f9e19fef9f655cb0737446636 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Sep 2024 14:22:35 -0700 Subject: [PATCH 281/338] fix: Increase method timeout to 240s for BigQuery Metastore Partition Service API version v1alpha (#830) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Increase method timeout to 240s for BigQuery Metastore Partition Service API version v1alpha PiperOrigin-RevId: 676173688 Source-Link: https://github.com/googleapis/googleapis/commit/02f118441fd76957d594f3a489b3b1f840fc66c3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/93f225b1e5c89712fa17dc398f990bb1cd927025 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTNmMjI1YjFlNWM4OTcxMmZhMTdkYzM5OGY5OTBiYjFjZDkyNzAyNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 1900614712c..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.26.0" + "version": "0.1.0" }, "snippets": [ { diff --git 
a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 180153550d6..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.26.0" + "version": "0.1.0" }, "snippets": [ { From 751c61d0e14c39aa418fb4b7e6406e1f40908223 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 14:01:03 -0700 Subject: [PATCH 282/338] build(deps): bump ipython from 7.31.1 to 8.10.0 in /samples/to_dataframe (#832) Bumps [ipython](https://github.com/ipython/ipython) from 7.31.1 to 8.10.0. - [Release notes](https://github.com/ipython/ipython/releases) - [Commits](https://github.com/ipython/ipython/compare/7.31.1...8.10.0) --- updated-dependencies: - dependency-name: ipython dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index f3134e8b829..527f878c24b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -6,7 +6,7 @@ pyarrow==17.0.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.26.0; python_version >= '3.10' +ipython==8.10.0; python_version >= '3.10' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From 4485a447b36e9f6a6492033f795ae31ee26cc383 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 11:33:24 -0700 Subject: [PATCH 283/338] chore(main): release 2.27.0 (#831) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..83f077ebd23 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.27.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..13cb352b0a8 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ 
b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.27.0" }, "snippets": [ { From 303f1a2f4b6d99752c3747425426c26992f06204 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Nov 2024 20:59:32 +0800 Subject: [PATCH 284/338] build: use multiScm for Kokoro release builds (#844) * build: use multiScm for Kokoro release builds Source-Link: https://github.com/googleapis/synthtool/commit/0da16589204e7f61911f64fcb30ac2d3b6e59b31 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 * add constraints file for python 3.13 --------- Co-authored-by: Owl Bot Co-authored-by: Linchin --- bigquery_storage/quickstart/noxfile.py | 2 +- bigquery_storage/snippets/noxfile.py | 2 +- bigquery_storage/to_dataframe/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py index c36d5f2d81f..494639d2fa5 100644 --- a/bigquery_storage/quickstart/noxfile.py +++ b/bigquery_storage/quickstart/noxfile.py @@ -88,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py index c36d5f2d81f..494639d2fa5 100644 --- a/bigquery_storage/snippets/noxfile.py +++ b/bigquery_storage/snippets/noxfile.py @@ -88,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py index c36d5f2d81f..494639d2fa5 100644 --- a/bigquery_storage/to_dataframe/noxfile.py +++ b/bigquery_storage/to_dataframe/noxfile.py @@ -88,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. 
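# For context (not part of the diff): IGNORED_VERSIONS, read from
# TEST_CONFIG just below, filters ALL_VERSIONS down to the sessions these
# generated noxfiles actually run. A self-contained sketch of that filter,
# with an assumed ignore list; lowercase names avoid the real constants:
all_versions = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
ignored_versions = ["3.7", "3.8"]  # illustrative value only
tested = sorted(v for v in all_versions if v not in ignored_versions)
# Note the lexicographic sort, so "3.9" lands after "3.13":
print(tested)  # ['3.10', '3.11', '3.12', '3.13', '3.9']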
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 0fc6e4ef366a4800cf345b8d4c3261fe1fd735ec Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 15 Nov 2024 21:56:18 +0100 Subject: [PATCH 285/338] chore(deps): update all dependencies (#843) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 10 +++++----- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 29af5705676..f68d011d849 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.26.0 +google-cloud-bigquery-storage==2.27.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 7c2a117918e..82da7ac7610 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.26.0 -google-cloud-bigquery==3.25.0 +google-cloud-bigquery-storage==2.27.0 +google-cloud-bigquery==3.26.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 527f878c24b..f2a49e1922c 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,15 +1,15 @@ -google-auth==2.34.0 -google-cloud-bigquery-storage==2.26.0 -google-cloud-bigquery==3.25.0 +google-auth==2.35.0 +google-cloud-bigquery-storage==2.27.0 +google-cloud-bigquery==3.26.0 pyarrow===12.0.1; python_version == '3.7' pyarrow==17.0.0; python_version >= '3.8' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.10.0; python_version >= '3.10' +ipython==8.29.0; python_version >= '3.10' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' -pandas==2.2.2; python_version >= '3.9' +pandas==2.2.3; python_version >= '3.9' tqdm==4.66.5 db-dtypes==1.3.0 From acc684aeaad5da02b2cef20419c9a645221224fe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 15:58:50 -0500 Subject: [PATCH 286/338] fix: Disable universe-domain validation (#839) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.20.0 PiperOrigin-RevId: 689017451 Source-Link: https://github.com/googleapis/googleapis/commit/2c7ba7e84abb5e8b61fbbecbec4d0e98416d4a9d Source-Link: https://github.com/googleapis/googleapis-gen/commit/55b43319b3c8c5fde63c912ba55fecf4310f10bb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTViNDMzMTliM2M4YzVmZGU2M2M5MTJiYTU1ZmVjZjQzMTBmMTBiYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.20.2 PiperOrigin-RevId: 691540410 Source-Link: 
https://github.com/googleapis/googleapis/commit/b43cfb18b7f23641d1254188a9cc2b3515895d98 Source-Link: https://github.com/googleapis/googleapis-gen/commit/40fd27b08abb2e8b8a84b57941e1226971f37a97 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDBmZDI3YjA4YWJiMmU4YjhhODRiNTc5NDFlMTIyNjk3MWYzN2E5NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 83f077ebd23..c1d157157c6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.27.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 13cb352b0a8..302b781542e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.27.0" + "version": "0.1.0" }, "snippets": [ { From 2bb2f7fa41966288f96bbf75b3d51d13d1b5dc72 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 19 Nov 2024 22:13:32 +0100 Subject: [PATCH 287/338] chore(deps): update all dependencies (#847) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Pin pyarrow for Python 3.8 * syntax --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 82da7ac7610..1de9b04aad3 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.27.0 -google-cloud-bigquery==3.26.0 +google-cloud-bigquery==3.27.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index f2a49e1922c..d8faef7e2a3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,8 +1,9 @@ -google-auth==2.35.0 +google-auth==2.36.0 google-cloud-bigquery-storage==2.27.0 -google-cloud-bigquery==3.26.0 +google-cloud-bigquery==3.27.0 pyarrow===12.0.1; python_version == '3.7' -pyarrow==17.0.0; python_version >= '3.8' +pyarrow===17.0.0; python_version == '3.8' +pyarrow==18.0.0; python_version >= '3.9' ipython===7.31.1; 
python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' @@ -11,5 +12,5 @@ ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' -tqdm==4.66.5 -db-dtypes==1.3.0 +tqdm==4.67.0 +db-dtypes==1.3.1 From 40e61428f47489eeea90fe950254a9382cb6967b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 26 Nov 2024 19:26:49 +0100 Subject: [PATCH 288/338] chore(deps): update all dependencies (#850) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d8faef7e2a3..7e8f5d9054a 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -12,5 +12,5 @@ ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' -tqdm==4.67.0 +tqdm==4.67.1 db-dtypes==1.3.1 From 2f99ebfcaff86b7c6d206af351186fac2d6aee35 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 2 Dec 2024 20:50:54 +0100 Subject: [PATCH 289/338] chore(deps): update all dependencies (#851) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 7e8f5d9054a..c56331318a8 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -3,7 +3,7 @@ google-cloud-bigquery-storage==2.27.0 google-cloud-bigquery==3.27.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' -pyarrow==18.0.0; python_version >= '3.9' +pyarrow==18.1.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' From 3fc6aa9cf266933a54d880b4feb2f121de60a5ef Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Dec 2024 19:11:06 +0100 Subject: [PATCH 290/338] chore(deps): update all dependencies (#852) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index df9161beca8..427a1645b8b 100644 --- 
a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.3; python_version >= '3.8' +pytest==8.3.4; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 8982387d75a..a507e7ae745 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.4.0 pytest===7.4.3; python_version == '3.7' -pytest==8.3.3; python_version >= '3.8' +pytest==8.3.4; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index df9161beca8..427a1645b8b 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.3; python_version >= '3.8' +pytest==8.3.4; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index c56331318a8..cd216154f08 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -7,7 +7,7 @@ pyarrow==18.1.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.29.0; python_version >= '3.10' +ipython==8.30.0; python_version >= '3.10' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From 3d958cb59deb19be3ed2249f3e1846e8733a975a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 05:24:26 +0800 Subject: [PATCH 291/338] feat: Add support for opt-in debug logging (#855) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for opt-in debug logging fix: Fix typing issue with gRPC metadata when key ends in -bin chore: Update gapic-generator-python to v1.21.0 PiperOrigin-RevId: 705285820 Source-Link: https://github.com/googleapis/googleapis/commit/f9b8b9150f7fcd600b0acaeef91236b1843f5e49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca1e0a1e472d6e6f5de883a5cb54724f112ce348 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- ...data_google.cloud.bigquery.storage.v1.json | 36 +++++++++---------- ...google.cloud.bigquery.storage.v1alpha.json | 20 +++++------ ...google.cloud.bigquery.storage.v1beta2.json | 36 +++++++++---------- 3 files changed, 46 insertions(+), 46 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c1d157157c6..c9e425d474f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -55,7 +55,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.bigquery_storage_v1.types.ReadSession", @@ -143,7 +143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", @@ -228,7 +228,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", @@ -312,7 +312,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", @@ -389,7 +389,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", @@ -465,7 +465,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", @@ -542,7 +542,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", @@ -618,7 +618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", @@ -699,7 +699,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", @@ -779,7 +779,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", @@ -864,7 +864,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", @@ -948,7 +948,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", @@ -1029,7 +1029,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", @@ -1109,7 +1109,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", @@ -1190,7 +1190,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", @@ -1270,7 +1270,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", @@ -1351,7 +1351,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", @@ 
-1431,7 +1431,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json index 3c29286ee87..d55a493695e 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsResponse", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsResponse", @@ -196,7 +196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "batch_delete_metastore_partitions" @@ -269,7 +269,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "batch_delete_metastore_partitions" @@ -343,7 +343,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsResponse", @@ -419,7 +419,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsResponse", @@ -500,7 +500,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsResponse", @@ -580,7 +580,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsResponse", @@ -657,7 +657,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsResponse]", @@ -733,7 +733,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsResponse]", diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 302b781542e..e92a81e4da6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -55,7 +55,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.bigquery_storage_v1beta2.types.ReadSession", @@ -143,7 +143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", @@ -228,7 +228,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", @@ -312,7 +312,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", @@ -389,7 +389,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", @@ -465,7 +465,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", @@ -542,7 +542,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", @@ -618,7 +618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", @@ -699,7 +699,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", @@ -779,7 +779,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", @@ -864,7 +864,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", @@ -948,7 +948,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", @@ -1029,7 +1029,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", @@ -1109,7 +1109,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", @@ -1190,7 +1190,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", @@ -1270,7 +1270,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", @@ -1351,7 +1351,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" 
} ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", @@ -1431,7 +1431,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", From 7bea80cda2a337261e58bef7434c5913c8eac210 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 12 Dec 2024 23:14:53 +0100 Subject: [PATCH 292/338] chore(deps): update all dependencies (#854) Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index cd216154f08..d9885480c45 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.36.0 +google-auth==2.37.0 google-cloud-bigquery-storage==2.27.0 google-cloud-bigquery==3.27.0 pyarrow===12.0.1; python_version == '3.7' From 87618cabf3079484d0c8e6076bfa090f4a02c706 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 17 Dec 2024 20:21:22 +0100 Subject: [PATCH 293/338] chore(deps): update all dependencies (#856) Co-authored-by: Lingqing Gan --- bigquery_storage/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index a507e7ae745..3e93ef86d3c 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ -google-cloud-testutils==1.4.0 +google-cloud-testutils==1.5.0 pytest===7.4.3; python_version == '3.7' pytest==8.3.4; python_version >= '3.8' From 96c1d8defd1f6bbcc2fca9e717232a673133112e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 3 Jan 2025 23:40:08 +0100 Subject: [PATCH 294/338] chore(deps): update all dependencies (#858) * chore(deps): update all dependencies * Update unittest.yml --------- Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d9885480c45..900a88a76c0 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -7,7 +7,7 @@ pyarrow==18.1.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.30.0; python_version >= '3.10' +ipython==8.31.0; python_version >= '3.10' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From 16ec227365fcdd20ff7032f1b3ccb6c3f0b36b6a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 28 Jan 2025 20:32:36 +0100 Subject: [PATCH 295/338] chore(deps): update all dependencies (#866) --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 1de9b04aad3..8007c6ca54f 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.27.0 -google-cloud-bigquery==3.27.0 +google-cloud-bigquery==3.29.0 diff --git 
a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 900a88a76c0..41ed86dd32f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,9 +1,9 @@ -google-auth==2.37.0 +google-auth==2.38.0 google-cloud-bigquery-storage==2.27.0 -google-cloud-bigquery==3.27.0 +google-cloud-bigquery==3.29.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' -pyarrow==18.1.0; python_version >= '3.9' +pyarrow==19.0.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' @@ -13,4 +13,4 @@ pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' tqdm==4.67.1 -db-dtypes==1.3.1 +db-dtypes==1.4.0 From 698e6baae566c3eb625ea69e036c3b4fae1635f9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 4 Feb 2025 10:21:17 -0600 Subject: [PATCH 296/338] chore(main): release 2.28.0 (#848) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(main): release 2.28.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c9e425d474f..f54de771b6f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.28.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index e92a81e4da6..7e208a29c87 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.28.0" }, "snippets": [ { From d51706ee742b13a656a5fdbf0a67a1d57dab816b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 4 Feb 2025 19:49:07 +0100 Subject: [PATCH 297/338] chore(deps): update all dependencies (#880) Co-authored-by: Lingqing Gan --- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 3e93ef86d3c..43975b86927 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ -google-cloud-testutils==1.5.0 
+google-cloud-testutils==1.6.0 pytest===7.4.3; python_version == '3.7' pytest==8.3.4; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 41ed86dd32f..0730af51e08 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -7,7 +7,7 @@ pyarrow==19.0.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.31.0; python_version >= '3.10' +ipython==8.32.0; python_version >= '3.10' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From bc96dfa20343c464338ed3e795d1acbd4833efd6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 11 Feb 2025 20:32:51 +0100 Subject: [PATCH 298/338] chore(deps): update all dependencies (#881) --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index f68d011d849..238ac503830 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.27.0 +google-cloud-bigquery-storage==2.28.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 8007c6ca54f..54d91ef83b1 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.27.0 +google-cloud-bigquery-storage==2.28.0 google-cloud-bigquery==3.29.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 0730af51e08..5c96e2d5bfe 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.38.0 -google-cloud-bigquery-storage==2.27.0 +google-cloud-bigquery-storage==2.28.0 google-cloud-bigquery==3.29.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' @@ -13,4 +13,4 @@ pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' tqdm==4.67.1 -db-dtypes==1.4.0 +db-dtypes==1.4.1 From 5dc943024bbcdef385b31c3ed3fd5d5a310c81db Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Feb 2025 15:44:30 -0800 Subject: [PATCH 299/338] feat: Add REST Interceptors which support reading metadata (#884) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add REST Interceptors which support reading metadata feat: Add support for reading selective GAPIC generation methods from service YAML chore: Update gapic-generator-python to v1.22.0 PiperOrigin-RevId: 724026024 Source-Link: https://github.com/googleapis/googleapis/commit/ad9963857109513e77eed153a66264481789109f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e291c4dd1d670eda19998de76f967e1603a48993 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTI5MWM0ZGQxZDY3MGVkYTE5OTk4ZGU3NmY5NjdlMTYwM2E0ODk5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md 
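In practice, this change means the generated REST transports expose hooks that receive response metadata alongside the response itself. A minimal sketch of the pattern, with an assumed method name and a standalone class (the real base class lives in the library's generated transports/rest.py, not in this diff):

    from typing import Sequence, Tuple

    class MetadataReadingInterceptor:
        # In real code this would subclass the generated *RestInterceptor;
        # "example_method" stands in for an actual RPC name.
        def post_example_method_with_metadata(
            self, response, metadata: Sequence[Tuple[str, str]]
        ):
            # Unlike the older post_<method> hook, this variant also sees
            # the response metadata and must return both values.
            for key, value in metadata:
                print(f"{key}: {value}")
            return response, metadata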
--------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index f54de771b6f..c9e425d474f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.28.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 7e208a29c87..e92a81e4da6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.28.0" + "version": "0.1.0" }, "snippets": [ { From 55834739821e7b3bb03eb95d1dc2218a7fe57c59 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 20 Feb 2025 20:11:03 +0100 Subject: [PATCH 300/338] chore(deps): update dependency pyarrow to v19.0.1 (#894) Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 5c96e2d5bfe..10733632fe4 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -3,7 +3,7 @@ google-cloud-bigquery-storage==2.28.0 google-cloud-bigquery==3.29.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' -pyarrow==19.0.0; python_version >= '3.9' +pyarrow==19.0.1; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' From 30cad9894e21cc710c46700597d5abb69e3a6f53 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 28 Feb 2025 23:00:03 +0100 Subject: [PATCH 301/338] chore(deps): update all dependencies (#898) * chore(deps): update all dependencies * pin ipython===8.33.0 for python 3.10 --------- Co-authored-by: Lingqing Gan --- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 54d91ef83b1..acf944d68b3 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-bigquery-storage==2.28.0 -google-cloud-bigquery==3.29.0 +google-cloud-bigquery==3.30.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 10733632fe4..8bfdd487f05 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,13 +1,14 @@ google-auth==2.38.0 google-cloud-bigquery-storage==2.28.0 
-google-cloud-bigquery==3.29.0 +google-cloud-bigquery==3.30.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' pyarrow==19.0.1; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' -ipython==8.32.0; python_version >= '3.10' +ipython===8.33.0; python_version == '3.10' +ipython==9.0.0; python_version >= '3.11' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From f230168076676cc8c37088248a7a92788170cb9b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 4 Mar 2025 19:26:55 +0100 Subject: [PATCH 302/338] chore(deps): update all dependencies (#900) --- bigquery_storage/quickstart/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements-test.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 427a1645b8b..0404a8ad211 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.4; python_version >= '3.8' +pytest==8.3.5; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 43975b86927..acfea47c216 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ google-cloud-testutils==1.6.0 pytest===7.4.3; python_version == '3.7' -pytest==8.3.4; python_version >= '3.8' +pytest==8.3.5; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 427a1645b8b..0404a8ad211 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.4; python_version >= '3.8' +pytest==8.3.5; python_version >= '3.8' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 8bfdd487f05..20f2351df4b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,7 +8,7 @@ ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' ipython===8.33.0; python_version == '3.10' -ipython==9.0.0; python_version >= '3.11' +ipython==9.0.1; python_version >= '3.11' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' From 62af9c31b4380d83f3d4fd6b4d60d0033ca7c640 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 11 Mar 2025 14:32:58 +0100 Subject: [PATCH 303/338] chore(deps): update all dependencies (#903) --- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 20f2351df4b..6cdc435a7ba 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -8,10 +8,10 @@ ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; 
python_version == '3.9' ipython===8.33.0; python_version == '3.10' -ipython==9.0.1; python_version >= '3.11' +ipython==9.0.2; python_version >= '3.11' ipywidgets==8.1.5 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' tqdm==4.67.1 -db-dtypes==1.4.1 +db-dtypes==1.4.2 From 47a8fcee8a122e1af8b88856aee44586dea31d58 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 11 Mar 2025 13:14:16 -0700 Subject: [PATCH 304/338] chore(main): release 2.29.0 (#887) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c9e425d474f..c0c21e24336 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.29.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index e92a81e4da6..a86ad141653 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.29.0" }, "snippets": [ { From 3efdde6167b14029fe569f4ed75b601dc1688995 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 13 Mar 2025 01:03:54 +0100 Subject: [PATCH 305/338] chore(deps): update all dependencies (#908) * chore(deps): update all dependencies * Update lint.yml * Update docs.yml --------- Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 238ac503830..e593894637c 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.28.0 +google-cloud-bigquery-storage==2.29.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index acf944d68b3..58b88cef0be 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.28.0 +google-cloud-bigquery-storage==2.29.0 google-cloud-bigquery==3.30.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 6cdc435a7ba..f61186d5fa2 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.38.0 
-google-cloud-bigquery-storage==2.28.0 +google-cloud-bigquery-storage==2.29.0 google-cloud-bigquery==3.30.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' From 4419e67e98e80a574ed94bb881a8f27a5793f4cd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 10:31:51 -0700 Subject: [PATCH 306/338] chore(main): release 2.29.1 (#910) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c0c21e24336..402321b6824 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.29.0" + "version": "2.29.1" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index a86ad141653..24bed20c192 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.29.0" + "version": "2.29.1" }, "snippets": [ { From 68b2ae70f41d0eb499c9e6b6a541c01fa827b9b5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:51:10 -0400 Subject: [PATCH 307/338] chore: Update gapic-generator-python to 1.23.6 (#917) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.23.6 PiperOrigin-RevId: 738170370 Source-Link: https://github.com/googleapis/googleapis/commit/3f1e17aa2dec3f146a9a2a8a64c5c6d19d0b6e15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9afd8c33d4cae610b75fa4999264ea8c8c66b9d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWFmZDhjMzNkNGNhZTYxMGI3NWZhNDk5OTI2NGVhOGM4YzY2YjlkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- ...age_v1_generated_big_query_read_create_read_session_async.py | 2 +- ...rage_v1_generated_big_query_read_create_read_session_sync.py | 2 +- ...gquerystorage_v1_generated_big_query_read_read_rows_async.py | 2 +- ...igquerystorage_v1_generated_big_query_read_read_rows_sync.py | 2 +- ...orage_v1_generated_big_query_read_split_read_stream_async.py | 2 +- ...torage_v1_generated_big_query_read_split_read_stream_sync.py | 2 +- ...erystorage_v1_generated_big_query_write_append_rows_async.py | 2 +- ...uerystorage_v1_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- 
...ge_v1_generated_big_query_write_create_write_stream_async.py | 2 +- ...age_v1_generated_big_query_write_create_write_stream_sync.py | 2 +- ..._v1_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...e_v1_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...uerystorage_v1_generated_big_query_write_flush_rows_async.py | 2 +- ...querystorage_v1_generated_big_query_write_flush_rows_sync.py | 2 +- ...orage_v1_generated_big_query_write_get_write_stream_async.py | 2 +- ...torage_v1_generated_big_query_write_get_write_stream_sync.py | 2 +- ...partition_service_batch_create_metastore_partitions_async.py | 2 +- ..._partition_service_batch_create_metastore_partitions_sync.py | 2 +- ...partition_service_batch_delete_metastore_partitions_async.py | 2 +- ..._partition_service_batch_delete_metastore_partitions_sync.py | 2 +- ...partition_service_batch_update_metastore_partitions_async.py | 2 +- ..._partition_service_batch_update_metastore_partitions_sync.py | 2 +- ...tastore_partition_service_list_metastore_partitions_async.py | 2 +- ...etastore_partition_service_list_metastore_partitions_sync.py | 2 +- ...store_partition_service_stream_metastore_partitions_async.py | 2 +- ...astore_partition_service_stream_metastore_partitions_sync.py | 2 +- ...1beta2_generated_big_query_read_create_read_session_async.py | 2 +- ...v1beta2_generated_big_query_read_create_read_session_sync.py | 2 +- ...ystorage_v1beta2_generated_big_query_read_read_rows_async.py | 2 +- ...rystorage_v1beta2_generated_big_query_read_read_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_read_split_read_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_read_split_read_stream_sync.py | 2 +- ...orage_v1beta2_generated_big_query_write_append_rows_async.py | 2 +- ...torage_v1beta2_generated_big_query_write_append_rows_sync.py | 2 +- ...enerated_big_query_write_batch_commit_write_streams_async.py | 2 +- ...generated_big_query_write_batch_commit_write_streams_sync.py | 2 +- ...beta2_generated_big_query_write_create_write_stream_async.py | 2 +- ...1beta2_generated_big_query_write_create_write_stream_sync.py | 2 +- ...ta2_generated_big_query_write_finalize_write_stream_async.py | 2 +- ...eta2_generated_big_query_write_finalize_write_stream_sync.py | 2 +- ...torage_v1beta2_generated_big_query_write_flush_rows_async.py | 2 +- ...storage_v1beta2_generated_big_query_write_flush_rows_sync.py | 2 +- ..._v1beta2_generated_big_query_write_get_write_stream_async.py | 2 +- ...e_v1beta2_generated_big_query_write_get_write_stream_sync.py | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 48 files changed, 48 insertions(+), 48 deletions(-) diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py index b70da03de6d..ea25d382c8f 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py index f722665a57a..13d5a4577a8 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py index 4057ec36aba..fad19adb7d0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py index c80d0d4aa06..07aedb01554 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py index 3c961176d68..a814616bf90 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py index f4fe475aba0..270872795ed 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py index 65ea3ade58d..f8bb1c961cb 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py index 7777db6818d..2be6201dd70 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py index d4529cb9867..65ef174ced0 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py index 1458002ba82..d2736556fb1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py index fe8d874d089..3c4bdadeb80 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py index b731e2f7c94..b80374d91c9 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py index ad0549357c3..18c59d61e01 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py index 3492253ba9a..62a5dfed2f1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py index ee486bd5e04..4acee8ae560 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py index cf94b598c40..65dd50cd648 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py index f1b5f6e854b..346f83769bb 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py index 314bfa49df3..ebf20ac0bbe 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py index 2840025ed36..e7b1b056f71 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py index fabe4a00fb0..616dd180bd1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py index 231e2997528..cc589bc9e53 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py index 74242022f8a..dc3706ef44b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py index c1608f7784e..7a93f362492 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py index 8f2f2005b57..cedd86550e3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py index 344a5253cb7..5c52317101c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py index 09cb3122a36..37389a34069 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py index 310b0c6c483..f05ef144611 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py index 4e4b85f3ca3..2abac02fed6 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py index 2cc08be026d..257faccba25 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py index 70d121f21e3..0fc018f2b7c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py index ede50645b29..f82a610000c 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py index dd7bf53ed55..e106f2c7b98 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py index 54fbf14e7c8..c84b1e7b4e2 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py index 6091da370f5..6a3445a5893 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py index 592bacedf6e..e47fbc36a8e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py index b9ae5dd7be0..29e5204bc18 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py index 43b9f24f257..c1fb793265e 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py index d8f293bf2e1..5ca9e6ec389 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py index 7c0ed7e9e66..bffe548d26d 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py index d8deeaf6a7a..416dafdbdeb 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py index 28c2f48c0aa..1b0ebccb4c1 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py index 8edca5d7b6d..e40bf3ba6a3 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py index 15788671762..ec0ca244098 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py index a2a5881231b..d0752d6a4cf 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py index 6a1102b6040..75074c2db8b 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py index 04ad30d9584..62e0f48313a 100644 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 402321b6824..c9e425d474f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.29.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 24bed20c192..e92a81e4da6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.29.1" + "version": "0.1.0" }, "snippets": [ { From 741fada12a425966b3f251522e1240e81c8180d1 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Tue, 25 Mar 2025 11:47:46 -0700 Subject: [PATCH 308/338] feat: sample for AppendRowsStream with Arrow (#915) * feat: sample for AppendRowsStream with Arrow * Update requirements.txt * add test fixtures * lint * remove unused code * correct fixtures * add log for debugging * remove debug code and small fixes * remove fastavro in requirements.txt * remove version for storage * add print * use schema in template and add table verification * add a simple strategy to split large tables * fix unit test * wording * add dependency * address comments --- bigquery_storage/conftest.py | 18 ++ bigquery_storage/pyarrow/__init__.py | 15 + .../pyarrow/append_rows_with_arrow.py | 212 +++++++++++++ .../pyarrow/append_rows_with_arrow_test.py | 19 ++ bigquery_storage/pyarrow/noxfile.py | 292 ++++++++++++++++++ .../pyarrow/requirements-test.txt | 2 + bigquery_storage/pyarrow/requirements.txt | 5 + 7 files changed, 563 insertions(+) create mode 100644 bigquery_storage/pyarrow/__init__.py create mode 100644 bigquery_storage/pyarrow/append_rows_with_arrow.py create mode 100644 bigquery_storage/pyarrow/append_rows_with_arrow_test.py create mode 100644 bigquery_storage/pyarrow/noxfile.py create mode 100644 bigquery_storage/pyarrow/requirements-test.txt create mode 100644 bigquery_storage/pyarrow/requirements.txt diff --git a/bigquery_storage/conftest.py b/bigquery_storage/conftest.py index 92068ef539c..9e4b23e8be0 100644 --- a/bigquery_storage/conftest.py +++ b/bigquery_storage/conftest.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime import os import pytest @@ -20,3 +21,20 @@ @pytest.fixture(scope="session") def project_id(): return os.environ["GOOGLE_CLOUD_PROJECT"] + + +@pytest.fixture(scope="session") +def dataset(project_id): + from google.cloud import bigquery + + client = bigquery.Client() + dataset_suffix = datetime.datetime.now().strftime("%y%m%d_%H%M%S") + dataset_name = "samples_tests_" + dataset_suffix + + dataset_id = "{}.{}".format(project_id, dataset_name) + dataset = bigquery.Dataset(dataset_id) + dataset.location = "us-east7" + created_dataset = client.create_dataset(dataset) + yield created_dataset + + client.delete_dataset(created_dataset, delete_contents=True) diff --git a/bigquery_storage/pyarrow/__init__.py b/bigquery_storage/pyarrow/__init__.py new file mode 100644 index 00000000000..a2a70562f48 --- /dev/null +++ b/bigquery_storage/pyarrow/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow.py b/bigquery_storage/pyarrow/append_rows_with_arrow.py new file mode 100644 index 00000000000..727fb4505d8 --- /dev/null +++ b/bigquery_storage/pyarrow/append_rows_with_arrow.py @@ -0,0 +1,212 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
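+
+# This sample builds a pyarrow.Table, serializes its schema once in the
+# AppendRowsRequest template, and then streams the data to the table's
+# _default write stream as serialized Arrow record batches, each kept
+# under the 10 MB AppendRowsRequest size limit.
+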
+import datetime +import decimal +import pandas as pd +import pyarrow as pa + +from google.cloud import bigquery +from google.cloud.bigquery import enums +from google.cloud.bigquery_storage_v1 import types as gapic_types +from google.cloud.bigquery_storage_v1.writer import AppendRowsStream + + +TABLE_LENGTH = 100_000 + +BQ_SCHEMA = [ + bigquery.SchemaField("bool_col", enums.SqlTypeNames.BOOLEAN), + bigquery.SchemaField("int64_col", enums.SqlTypeNames.INT64), + bigquery.SchemaField("float64_col", enums.SqlTypeNames.FLOAT64), + bigquery.SchemaField("numeric_col", enums.SqlTypeNames.NUMERIC), + bigquery.SchemaField("bignumeric_col", enums.SqlTypeNames.BIGNUMERIC), + bigquery.SchemaField("string_col", enums.SqlTypeNames.STRING), + bigquery.SchemaField("bytes_col", enums.SqlTypeNames.BYTES), + bigquery.SchemaField("date_col", enums.SqlTypeNames.DATE), + bigquery.SchemaField("datetime_col", enums.SqlTypeNames.DATETIME), + bigquery.SchemaField("time_col", enums.SqlTypeNames.TIME), + bigquery.SchemaField("timestamp_col", enums.SqlTypeNames.TIMESTAMP), + bigquery.SchemaField("geography_col", enums.SqlTypeNames.GEOGRAPHY), + bigquery.SchemaField( + "range_date_col", enums.SqlTypeNames.RANGE, range_element_type="DATE" + ), + bigquery.SchemaField( + "range_datetime_col", + enums.SqlTypeNames.RANGE, + range_element_type="DATETIME", + ), + bigquery.SchemaField( + "range_timestamp_col", + enums.SqlTypeNames.RANGE, + range_element_type="TIMESTAMP", + ), +] + +PYARROW_SCHEMA = pa.schema( + [ + pa.field("bool_col", pa.bool_()), + pa.field("int64_col", pa.int64()), + pa.field("float64_col", pa.float64()), + pa.field("numeric_col", pa.decimal128(38, scale=9)), + pa.field("bignumeric_col", pa.decimal256(76, scale=38)), + pa.field("string_col", pa.string()), + pa.field("bytes_col", pa.binary()), + pa.field("date_col", pa.date32()), + pa.field("datetime_col", pa.timestamp("us")), + pa.field("time_col", pa.time64("us")), + pa.field("timestamp_col", pa.timestamp("us")), + pa.field("geography_col", pa.string()), + pa.field( + "range_date_col", + pa.struct([("start", pa.date32()), ("end", pa.date32())]), + ), + pa.field( + "range_datetime_col", + pa.struct([("start", pa.timestamp("us")), ("end", pa.timestamp("us"))]), + ), + pa.field( + "range_timestamp_col", + pa.struct([("start", pa.timestamp("us")), ("end", pa.timestamp("us"))]), + ), + ] +) + + +def bqstorage_write_client(): + from google.cloud import bigquery_storage_v1 + + return bigquery_storage_v1.BigQueryWriteClient() + + +def make_table(project_id, dataset_id, bq_client): + table_id = "append_rows_w_arrow_test" + table_id_full = f"{project_id}.{dataset_id}.{table_id}" + bq_table = bigquery.Table(table_id_full, schema=BQ_SCHEMA) + created_table = bq_client.create_table(bq_table) + + return created_table + + +def create_stream(bqstorage_write_client, table): + stream_name = f"projects/{table.project}/datasets/{table.dataset_id}/tables/{table.table_id}/_default" + request_template = gapic_types.AppendRowsRequest() + request_template.write_stream = stream_name + + # Add schema to the template. 
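+    # The serialized pyarrow schema travels once in the request template's
+    # writer_schema; every subsequent request on this stream then carries
+    # only serialized record batches, which the backend decodes against it.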
+ arrow_data = gapic_types.AppendRowsRequest.ArrowData() + arrow_data.writer_schema.serialized_schema = PYARROW_SCHEMA.serialize().to_pybytes() + request_template.arrow_rows = arrow_data + + append_rows_stream = AppendRowsStream( + bqstorage_write_client, + request_template, + ) + return append_rows_stream + + +def generate_pyarrow_table(num_rows=TABLE_LENGTH): + date_1 = datetime.date(2020, 10, 1) + date_2 = datetime.date(2021, 10, 1) + + datetime_1 = datetime.datetime(2016, 12, 3, 14, 11, 27, 123456) + datetime_2 = datetime.datetime(2017, 12, 3, 14, 11, 27, 123456) + + timestamp_1 = datetime.datetime( + 1999, 12, 31, 23, 59, 59, 999999, tzinfo=datetime.timezone.utc + ) + timestamp_2 = datetime.datetime( + 2000, 12, 31, 23, 59, 59, 999999, tzinfo=datetime.timezone.utc + ) + + # Pandas Dataframe. + rows = [] + for i in range(num_rows): + row = { + "bool_col": True, + "int64_col": i, + "float64_col": float(i), + "numeric_col": decimal.Decimal("0.000000001"), + "bignumeric_col": decimal.Decimal("0.1234567891"), + "string_col": "data as string", + "bytes_col": str.encode("data in bytes"), + "date_col": datetime.date(2019, 5, 10), + "datetime_col": datetime_1, + "time_col": datetime.time(23, 59, 59, 999999), + "timestamp_col": timestamp_1, + "geography_col": "POINT(-121 41)", + "range_date_col": {"start": date_1, "end": date_2}, + "range_datetime_col": {"start": datetime_1, "end": datetime_2}, + "range_timestamp_col": {"start": timestamp_1, "end": timestamp_2}, + } + rows.append(row) + df = pd.DataFrame(rows) + + # Dataframe to PyArrow Table. + table = pa.Table.from_pandas(df, schema=PYARROW_SCHEMA) + + return table + + +def generate_write_requests(pyarrow_table): + # Determine max_chunksize of the record batches. Because max size of + # AppendRowsRequest is 10 MB, we need to split the table if it's too big. + # See: https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#appendrowsrequest + max_request_bytes = 10 * 2**20 # 10 MB + chunk_num = int(pyarrow_table.nbytes / max_request_bytes) + 1 + chunk_size = int(pyarrow_table.num_rows / chunk_num) + + # Construct request(s). + for batch in pyarrow_table.to_batches(max_chunksize=chunk_size): + request = gapic_types.AppendRowsRequest() + request.arrow_rows.rows.serialized_record_batch = batch.serialize().to_pybytes() + yield request + + +def append_rows(bqstorage_write_client, table): + append_rows_stream = create_stream(bqstorage_write_client, table) + pyarrow_table = generate_pyarrow_table() + futures = [] + + for request in generate_write_requests(pyarrow_table): + response_future = append_rows_stream.send(request) + futures.append(response_future) + response_future.result() + + return futures + + +def verify_result(client, table, futures): + bq_table = client.get_table(table) + + # Verify table schema. + assert bq_table.schema == BQ_SCHEMA + + # Verify table size. + query = client.query(f"SELECT COUNT(1) FROM `{bq_table}`;") + query_result = query.result().to_dataframe() + # There might be extra rows due to retries. + assert query_result.iloc[0, 0] >= TABLE_LENGTH + + # Verify that table was split into multiple requests. 
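+    # The ~100,000-row Arrow table is larger than the 10 MB per-request cap,
+    # so generate_write_requests splits it into exactly two record batches,
+    # and append_rows returns one future per request.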
+ assert len(futures) == 2 + + +def main(project_id, dataset): + write_client = bqstorage_write_client() + bq_client = bigquery.Client() + table = make_table(project_id, dataset.dataset_id, bq_client) + + futures = append_rows(write_client, table) + verify_result(bq_client, table, futures) diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py new file mode 100644 index 00000000000..2b1c8f1da36 --- /dev/null +++ b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py @@ -0,0 +1,19 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import append_rows_with_arrow + + +def test_append_rows_with_arrow(project_id, dataset): + append_rows_with_arrow.main(project_id, dataset) diff --git a/bigquery_storage/pyarrow/noxfile.py b/bigquery_storage/pyarrow/noxfile.py new file mode 100644 index 00000000000..494639d2fa5 --- /dev/null +++ b/bigquery_storage/pyarrow/noxfile.py @@ -0,0 +1,292 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, Optional + +import nox + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. 
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8") + else: + session.install("flake8", "flake8-annotations") + + args = FLAKE8_COMMON_ARGS + [ + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
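+    # (--fss is the short form of isort's --force-sort-within-sections flag.)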
+    # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
+    session.run("isort", "--fss", *python_files)
+    session.run("black", *python_files)
+
+
+#
+# Sample Tests
+#
+
+
+PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
+
+
+def _session_tests(
+    session: nox.sessions.Session, post_install: Optional[Callable] = None
+) -> None:
+    """Runs py.test for a particular project."""
+    # Check for the presence of tests.
+    test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob(
+        "**/test_*.py", recursive=True
+    )
+    test_list.extend(glob.glob("**/tests", recursive=True))
+
+    if len(test_list) == 0:
+        print("No tests found, skipping directory.")
+        return
+
+    if TEST_CONFIG["pip_version_override"]:
+        pip_version = TEST_CONFIG["pip_version_override"]
+        session.install(f"pip=={pip_version}")
+    concurrent_args = []
+    if os.path.exists("requirements.txt"):
+        if os.path.exists("constraints.txt"):
+            session.install("-r", "requirements.txt", "-c", "constraints.txt")
+        else:
+            session.install("-r", "requirements.txt")
+        with open("requirements.txt") as rfile:
+            packages = rfile.read()
+
+    if os.path.exists("requirements-test.txt"):
+        if os.path.exists("constraints-test.txt"):
+            session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
+        else:
+            session.install("-r", "requirements-test.txt")
+        with open("requirements-test.txt") as rtfile:
+            packages += rtfile.read()
+
+    if INSTALL_LIBRARY_FROM_SOURCE:
+        session.install("-e", _get_repo_root())
+
+    if post_install:
+        post_install(session)
+
+    if "pytest-parallel" in packages:
+        concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"])
+    elif "pytest-xdist" in packages:
+        concurrent_args.extend(["-n", "auto"])
+
+    session.run(
+        "pytest",
+        *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
+        # Pytest will return 5 when no tests are collected. This can happen
+        # on Travis where slow and flaky tests are excluded.
+        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
+        success_codes=[0, 5],
+        env=get_pytest_env_vars(),
+    )
+
+
+@nox.session(python=ALL_VERSIONS)
+def py(session: nox.sessions.Session) -> None:
+    """Runs py.test for a sample using the specified version of Python."""
+    if session.python in TESTED_VERSIONS:
+        _session_tests(session)
+    else:
+        session.skip(
+            "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+        )
+
+
+#
+# Readmegen
+#
+
+
+def _get_repo_root() -> Optional[str]:
+    """Returns the root folder of the project."""
+    # Get the root of this repository. Assume we don't have directories
+    # nested deeper than 10 levels.
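+    # Walk upward from the current working directory until a repository
+    # marker (.git, or setup.py for Cloud Build checkouts) is found.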
+    p = Path(os.getcwd())
+    for i in range(10):
+        if p is None:
+            break
+        if Path(p / ".git").exists():
+            return str(p)
+        # .git is not available in repos cloned via Cloud Build
+        # setup.py is always in the library's root, so use that instead
+        # https://github.com/googleapis/synthtool/issues/792
+        if Path(p / "setup.py").exists():
+            return str(p)
+        p = p.parent
+    raise Exception("Unable to detect repository root.")
+
+
+GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")])
+
+
+@nox.session
+@nox.parametrize("path", GENERATED_READMES)
+def readmegen(session: nox.sessions.Session, path: str) -> None:
+    """(Re-)generates the readme for a sample."""
+    session.install("jinja2", "pyyaml")
+    dir_ = os.path.dirname(path)
+
+    if os.path.exists(os.path.join(dir_, "requirements.txt")):
+        session.install("-r", os.path.join(dir_, "requirements.txt"))
+
+    in_file = os.path.join(dir_, "README.rst.in")
+    session.run(
+        "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file
+    )
diff --git a/bigquery_storage/pyarrow/requirements-test.txt b/bigquery_storage/pyarrow/requirements-test.txt
new file mode 100644
index 00000000000..0404a8ad211
--- /dev/null
+++ b/bigquery_storage/pyarrow/requirements-test.txt
@@ -0,0 +1,2 @@
+pytest===7.4.3; python_version == '3.7'
+pytest==8.3.5; python_version >= '3.8'
diff --git a/bigquery_storage/pyarrow/requirements.txt b/bigquery_storage/pyarrow/requirements.txt
new file mode 100644
index 00000000000..a593373b829
--- /dev/null
+++ b/bigquery_storage/pyarrow/requirements.txt
@@ -0,0 +1,5 @@
+db_dtypes
+google-cloud-bigquery
+google-cloud-bigquery-storage
+pandas
+pyarrow

From be9a81b5b7bf92b1b46f9881c632f05f9711e15c Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Tue, 25 Mar 2025 14:46:44 -0700
Subject: [PATCH 309/338] chore(main): release 2.30.0 (#914)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore(main): release 2.30.0

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
Co-authored-by: Owl Bot
---
 .../snippet_metadata_google.cloud.bigquery.storage.v1.json     | 2 +-
 ...nippet_metadata_google.cloud.bigquery.storage.v1beta2.json  | 2 +-
 bigquery_storage/pyarrow/append_rows_with_arrow.py             | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json
index c9e425d474f..a569a72b57b 100644
--- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json
+++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-cloud-bigquery-storage",
-    "version": "0.1.0"
+    "version": "2.30.0"
   },
   "snippets": [
     {
diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json
index e92a81e4da6..2c3e4a3e6d1 100644
--- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json
+++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json
@@ -8,7 +8,7 @@
], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.30.0" }, "snippets": [ { diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow.py b/bigquery_storage/pyarrow/append_rows_with_arrow.py index 727fb4505d8..84e03531fc1 100644 --- a/bigquery_storage/pyarrow/append_rows_with_arrow.py +++ b/bigquery_storage/pyarrow/append_rows_with_arrow.py @@ -15,15 +15,15 @@ # limitations under the License. import datetime import decimal + +from google.cloud.bigquery import enums import pandas as pd import pyarrow as pa from google.cloud import bigquery -from google.cloud.bigquery import enums from google.cloud.bigquery_storage_v1 import types as gapic_types from google.cloud.bigquery_storage_v1.writer import AppendRowsStream - TABLE_LENGTH = 100_000 BQ_SCHEMA = [ From 0cc8bf3f828ced6562eee93b0954c9c173ad179f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 27 Mar 2025 19:48:49 +0100 Subject: [PATCH 310/338] chore(deps): update all dependencies (#912) * chore(deps): update all dependencies * Update docs.yml * Update lint.yml --------- Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index e593894637c..a981ba188b3 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.29.0 +google-cloud-bigquery-storage==2.29.1 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 58b88cef0be..6007df1a8d5 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-bigquery-storage==2.29.0 +google-cloud-bigquery-storage==2.29.1 google-cloud-bigquery==3.30.0 diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index f61186d5fa2..83a68d59dd2 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,5 +1,5 @@ google-auth==2.38.0 -google-cloud-bigquery-storage==2.29.0 +google-cloud-bigquery-storage==2.29.1 google-cloud-bigquery==3.30.0 pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' From b30d9ee991369c60e5cfef0f33f28b8dacedc216 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Mon, 31 Mar 2025 16:15:41 -0700 Subject: [PATCH 311/338] test: make pyarrow sample less flaky (#919) * test: make pyarrow sample less flaky * add comment --- bigquery_storage/conftest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/conftest.py b/bigquery_storage/conftest.py index 9e4b23e8be0..fad90af1a6d 100644 --- a/bigquery_storage/conftest.py +++ b/bigquery_storage/conftest.py @@ -14,6 +14,7 @@ import datetime import os +import random import pytest @@ -28,8 +29,12 @@ def dataset(project_id): from google.cloud import bigquery client = bigquery.Client() - dataset_suffix = datetime.datetime.now().strftime("%y%m%d_%H%M%S") - dataset_name = "samples_tests_" + dataset_suffix + + # Add a random suffix to dataset name to avoid conflict, because we run + # a samples test on each supported Python version almost at the same time. 
+ dataset_time = datetime.datetime.now().strftime("%y%m%d_%H%M%S") + suffix = f"_{(random.randint(0, 99)):02d}" + dataset_name = "samples_tests_" + dataset_time + suffix dataset_id = "{}.{}".format(project_id, dataset_name) dataset = bigquery.Dataset(dataset_id) From b580b7f17bfdf56b4615b10ea2990863ebcf4dc2 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 3 Apr 2025 20:02:33 +0200 Subject: [PATCH 312/338] chore(deps): update all dependencies (#920) * chore(deps): update all dependencies * Update docs.yml * Update lint.yml * Update requirements.txt * Update requirements.txt --------- Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 5 +++-- bigquery_storage/to_dataframe/requirements.txt | 5 +++-- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index a981ba188b3..9e56bca4dfd 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.29.1 +google-cloud-bigquery-storage==2.30.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 6007df1a8d5..06c0e62f1da 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,2 +1,3 @@ -google-cloud-bigquery-storage==2.29.1 -google-cloud-bigquery==3.30.0 +google-cloud-bigquery-storage==2.30.0 +google-cloud-bigquery==3.30.0; python_version <= '3.8' +google-cloud-bigquery==3.31.0; python_version >= '3.9' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 83a68d59dd2..030552a12d3 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,6 +1,7 @@ google-auth==2.38.0 -google-cloud-bigquery-storage==2.29.1 -google-cloud-bigquery==3.30.0 +google-cloud-bigquery-storage==2.30.0 +google-cloud-bigquery==3.30.0; python_version <= '3.8' +google-cloud-bigquery==3.31.0; python_version >= '3.9' pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' pyarrow==19.0.1; python_version >= '3.9' From fb44860197e4021b69972c145174c196302e115c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 15:36:22 -0700 Subject: [PATCH 313/338] chore: Update gapic-generator-python to 1.24.1 (#924) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.24.0 PiperOrigin-RevId: 747419463 Source-Link: https://github.com/googleapis/googleapis/commit/340579bf7f97ba56cda0c70176dc5b03a8357667 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e8997ec5136ecb6ed9a969a4c2f13b3ab6a17c12 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTg5OTdlYzUxMzZlY2I2ZWQ5YTk2OWE0YzJmMTNiM2FiNmExN2MxMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: increased the number of partitions can be written in a single request docs: updated the number of partitions (from 100 to 900) can be inserted, updated and deleted in a single request BREAKING CHANGE: remove `location` from http annotations in all of the service requests PiperOrigin-RevId: 747473743 Source-Link: 
https://github.com/googleapis/googleapis/commit/1c153adc542b4c915eeab5290bc42581c821cc93 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b5aa0e891f457b1c5ce75b120d1b65c8738776b5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjVhYTBlODkxZjQ1N2IxYzVjZTc1YjEyMGQxYjY1Yzg3Mzg3NzZiNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to 1.24.1 PiperOrigin-RevId: 748739072 Source-Link: https://github.com/googleapis/googleapis/commit/b947e523934dbac5d97613d8aa08e04fc38c5fb6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8c5821aa65a921d59b3f7653d6f37c9c67410c2f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGM1ODIxYWE2NWE5MjFkNTliM2Y3NjUzZDZmMzdjOWM2NzQxMGMyZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index a569a72b57b..c9e425d474f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.30.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index 2c3e4a3e6d1..e92a81e4da6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.30.0" + "version": "0.1.0" }, "snippets": [ { From 03272ac38f74c69280428f7f89226bfb5ce087ce Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 23 Apr 2025 10:10:48 -0700 Subject: [PATCH 314/338] chore(main): release 2.31.0 (#925) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c9e425d474f..a662fdfe331 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.31.0" }, "snippets": [ { diff --git 
a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index e92a81e4da6..bafd0f761e2 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.31.0" }, "snippets": [ { From 470bc21e516285a07cb76e9880838872ebfe6c59 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 09:35:57 -0700 Subject: [PATCH 315/338] feat: Add BigQuery Metastore Partition Service API version v1beta (#941) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add BigQuery Metastore Partition Service API version v1beta PiperOrigin-RevId: 753333720 Source-Link: https://github.com/googleapis/googleapis/commit/389abd1ee15f6d6f79dcd76120b1e29d74edb23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/a87588791ac0057b5bcd5b91e497a5daff66ebbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTg3NTg4NzkxYWMwMDU3YjViY2Q1YjkxZTQ5N2E1ZGFmZjY2ZWJiYyJ9 * update replacement in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add gapic_version.py * update gapic_version.py * update docs/index.rst --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- ...batch_create_metastore_partitions_async.py | 58 ++ ..._batch_create_metastore_partitions_sync.py | 58 ++ ...batch_delete_metastore_partitions_async.py | 54 ++ ..._batch_delete_metastore_partitions_sync.py | 54 ++ ...batch_update_metastore_partitions_async.py | 57 ++ ..._batch_update_metastore_partitions_sync.py | 57 ++ ...service_list_metastore_partitions_async.py | 53 ++ ..._service_list_metastore_partitions_sync.py | 53 ++ ...rvice_stream_metastore_partitions_async.py | 64 ++ ...ervice_stream_metastore_partitions_sync.py | 64 ++ ...data_google.cloud.bigquery.storage.v1.json | 2 +- ..._google.cloud.bigquery.storage.v1beta.json | 782 ++++++++++++++++++ ...google.cloud.bigquery.storage.v1beta2.json | 2 +- 13 files changed, 1356 insertions(+), 2 deletions(-) create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py create 
mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py create mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py create mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py new file mode 100644 index 00000000000..b9dfc858246 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +async def sample_batch_create_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1beta.CreateMetastorePartitionRequest() + requests.parent = "parent_value" + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1beta.BatchCreateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.batch_create_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py new file mode 100644 index 00000000000..82e44c12280 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +def sample_batch_create_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1beta.CreateMetastorePartitionRequest() + requests.parent = "parent_value" + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1beta.BatchCreateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py new file mode 100644 index 00000000000..08b3739d076 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +async def sample_batch_delete_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + partition_values = bigquery_storage_v1beta.MetastorePartitionValues() + partition_values.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1beta.BatchDeleteMetastorePartitionsRequest( + parent="parent_value", + partition_values=partition_values, + ) + + # Make the request + await client.batch_delete_metastore_partitions(request=request) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py new file mode 100644 index 00000000000..97127a18ebe --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +def sample_batch_delete_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceClient() + + # Initialize request argument(s) + partition_values = bigquery_storage_v1beta.MetastorePartitionValues() + partition_values.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1beta.BatchDeleteMetastorePartitionsRequest( + parent="parent_value", + partition_values=partition_values, + ) + + # Make the request + client.batch_delete_metastore_partitions(request=request) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py new file mode 100644 index 00000000000..f391a15193e --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +async def sample_batch_update_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1beta.UpdateMetastorePartitionRequest() + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1beta.BatchUpdateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.batch_update_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py new file mode 100644 index 00000000000..b707fece719 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +def sample_batch_update_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceClient() + + # Initialize request argument(s) + requests = bigquery_storage_v1beta.UpdateMetastorePartitionRequest() + requests.metastore_partition.values = ["values_value1", "values_value2"] + + request = bigquery_storage_v1beta.BatchUpdateMetastorePartitionsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py new file mode 100644 index 00000000000..fc921d19096 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +async def sample_list_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta.ListMetastorePartitionsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py new file mode 100644 index 00000000000..5091e3effe7 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +def sample_list_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta.ListMetastorePartitionsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_metastore_partitions(request=request) + + # Handle the response + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py new file mode 100644 index 00000000000..ee2a6c5dfe2 --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +async def sample_stream_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta.StreamMetastorePartitionsRequest( + parent="parent_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta.StreamMetastorePartitionsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
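+    # (A production caller would typically yield one request per batch of
+    # partitions to stream; a single request is enough for this demo.)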
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.stream_metastore_partitions(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py new file mode 100644 index 00000000000..b2a85f4e0ed --- /dev/null +++ b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamMetastorePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-storage + + +# [START bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_storage_v1beta + + +def sample_stream_metastore_partitions(): + # Create a client + client = bigquery_storage_v1beta.MetastorePartitionServiceClient() + + # Initialize request argument(s) + request = bigquery_storage_v1beta.StreamMetastorePartitionsRequest( + parent="parent_value", + ) + + # This method expects an iterator which contains + # 'bigquery_storage_v1beta.StreamMetastorePartitionsRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.stream_metastore_partitions(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index a662fdfe331..c9e425d474f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json new file mode 100644 index 00000000000..7cad599102c --- /dev/null +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json @@ -0,0 +1,782 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.bigquery.storage.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-cloud-bigquery-storage", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.batch_create_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchCreateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsResponse", + "shortName": "batch_create_metastore_partitions" + }, + "description": "Sample for BatchCreateMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.batch_create_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchCreateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsResponse", + "shortName": "batch_create_metastore_partitions" + }, + "description": "Sample for BatchCreateMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.batch_delete_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchDeleteMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.BatchDeleteMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "batch_delete_metastore_partitions" + }, + "description": "Sample for BatchDeleteMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async", + 
"segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.batch_delete_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchDeleteMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.BatchDeleteMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "batch_delete_metastore_partitions" + }, + "description": "Sample for BatchDeleteMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.batch_update_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchUpdateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsResponse", + "shortName": "batch_update_metastore_partitions" + }, 
+ "description": "Sample for BatchUpdateMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.batch_update_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "BatchUpdateMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsResponse", + "shortName": "batch_update_metastore_partitions" + }, + "description": "Sample for BatchUpdateMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", + "shortName": "MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.list_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "ListMetastorePartitions" + }, + "parameters": 
[ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsResponse", + "shortName": "list_metastore_partitions" + }, + "description": "Sample for ListMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.list_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "ListMetastorePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsResponse", + "shortName": "list_metastore_partitions" + }, + "description": "Sample for ListMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", + "shortName": 
"MetastorePartitionServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.stream_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "StreamMetastorePartitions" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsResponse]", + "shortName": "stream_metastore_partitions" + }, + "description": "Sample for StreamMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", + "shortName": "MetastorePartitionServiceClient" + }, + "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.stream_metastore_partitions", + "method": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions", + "service": { + "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", + "shortName": "MetastorePartitionService" + }, + "shortName": "StreamMetastorePartitions" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsResponse]", + "shortName": "stream_metastore_partitions" + }, + "description": "Sample for StreamMetastorePartitions", + "file": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py" + } + ] +} diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index bafd0f761e2..e92a81e4da6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.31.0" + "version": "0.1.0" }, "snippets": [ { From c90da02297aecc542577d4b4c98534e161c99f68 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 6 May 2025 20:13:34 +0200 Subject: [PATCH 316/338] chore(deps): update all dependencies (#922) * chore(deps): update all dependencies * Update requirements.txt * Update requirements.txt * Update docs.yml * Update docs.yml * Update lint.yml --------- Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 12 ++++++------ 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index 9e56bca4dfd..d36772241dd 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.30.0 +google-cloud-bigquery-storage==2.31.0 diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index acfea47c216..6e1ccbe6d0a 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ -google-cloud-testutils==1.6.0 +google-cloud-testutils==1.6.2 pytest===7.4.3; python_version == '3.7' pytest==8.3.5; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 06c0e62f1da..1fa22d1d1a1 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.30.0 -google-cloud-bigquery==3.30.0; python_version <= '3.8' +google-cloud-bigquery-storage==2.31.0 +google-cloud-bigquery===3.30.0; python_version <= '3.8' google-cloud-bigquery==3.31.0; python_version >= '3.9' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 030552a12d3..044297f462e 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,16 +1,16 @@ -google-auth==2.38.0 -google-cloud-bigquery-storage==2.30.0 -google-cloud-bigquery==3.30.0; python_version <= '3.8' +google-auth==2.39.0 +google-cloud-bigquery-storage==2.31.0 +google-cloud-bigquery===3.30.0; python_version <= '3.8' google-cloud-bigquery==3.31.0; python_version >= '3.9' pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' -pyarrow==19.0.1; python_version >= '3.9' +pyarrow==20.0.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' 
ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' ipython===8.33.0; python_version == '3.10' -ipython==9.0.2; python_version >= '3.11' -ipywidgets==8.1.5 +ipython==9.2.0; python_version >= '3.11' +ipywidgets==8.1.7 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' From d44d4cbdd810ae053ae11ba4d16f29572b749082 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 May 2025 01:37:14 +0200 Subject: [PATCH 317/338] chore(deps): update all dependencies (#943) * chore(deps): update all dependencies * Update docs.yml * Update lint.yml * Update requirements.txt --------- Co-authored-by: Lingqing Gan --- bigquery_storage/to_dataframe/requirements.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index 044297f462e..ec84fe81e42 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,4 +1,4 @@ -google-auth==2.39.0 +google-auth==2.40.1 google-cloud-bigquery-storage==2.31.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' google-cloud-bigquery==3.31.0; python_version >= '3.9' @@ -15,4 +15,5 @@ pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' pandas==2.2.3; python_version >= '3.9' tqdm==4.67.1 -db-dtypes==1.4.2 +db-dtypes===1.4.2; python_version <= '3.8' +db-dtypes==1.4.3; python_version >= '3.9' From 95bace7dd23bbd5f839c4dc63a10f1e52ed29c44 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 22 May 2025 23:50:57 +0200 Subject: [PATCH 318/338] chore(deps): update all dependencies (#945) * chore(deps): update all dependencies * Update docs.yml * Update lint.yml --------- Co-authored-by: Lingqing Gan --- bigquery_storage/snippets/requirements-test.txt | 2 +- bigquery_storage/snippets/requirements.txt | 2 +- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index 6e1ccbe6d0a..a6c65ca6567 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,3 @@ -google-cloud-testutils==1.6.2 +google-cloud-testutils==1.6.4 pytest===7.4.3; python_version == '3.7' pytest==8.3.5; python_version >= '3.8' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 1fa22d1d1a1..30eaa4f434b 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-bigquery-storage==2.31.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.31.0; python_version >= '3.9' +google-cloud-bigquery==3.33.0; python_version >= '3.9' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index ec84fe81e42..cc4d98c87dc 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ -google-auth==2.40.1 +google-auth==2.40.2 google-cloud-bigquery-storage==2.31.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.31.0; python_version >= '3.9' +google-cloud-bigquery==3.33.0; python_version >= '3.9' pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' pyarrow==20.0.0; 
python_version >= '3.9' From e2b3376bb676f2e8762fb9c497362ca15ad1dbf0 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Tue, 27 May 2025 10:15:24 -0700 Subject: [PATCH 319/338] fix: separate create_stream() in pyarrow sample (#946) * fix: separate create_stream() in pyarrow sample * remove append_rows() * lint * add blank row --- .../pyarrow/append_rows_with_arrow.py | 39 +++++++++++-------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow.py b/bigquery_storage/pyarrow/append_rows_with_arrow.py index 84e03531fc1..e96077299fd 100644 --- a/bigquery_storage/pyarrow/append_rows_with_arrow.py +++ b/bigquery_storage/pyarrow/append_rows_with_arrow.py @@ -174,19 +174,6 @@ def generate_write_requests(pyarrow_table): yield request -def append_rows(bqstorage_write_client, table): - append_rows_stream = create_stream(bqstorage_write_client, table) - pyarrow_table = generate_pyarrow_table() - futures = [] - - for request in generate_write_requests(pyarrow_table): - response_future = append_rows_stream.send(request) - futures.append(response_future) - response_future.result() - - return futures - - def verify_result(client, table, futures): bq_table = client.get_table(table) @@ -196,6 +183,7 @@ def verify_result(client, table, futures): # Verify table size. query = client.query(f"SELECT COUNT(1) FROM `{bq_table}`;") query_result = query.result().to_dataframe() + # There might be extra rows due to retries. assert query_result.iloc[0, 0] >= TABLE_LENGTH @@ -204,9 +192,28 @@ def verify_result(client, table, futures): def main(project_id, dataset): + # Initialize clients. write_client = bqstorage_write_client() bq_client = bigquery.Client() - table = make_table(project_id, dataset.dataset_id, bq_client) - futures = append_rows(write_client, table) - verify_result(bq_client, table, futures) + # Create BigQuery table. + bq_table = make_table(project_id, dataset.dataset_id, bq_client) + + # Generate local PyArrow table. + pa_table = generate_pyarrow_table() + + # Convert PyArrow table to Protobuf requests. + requests = generate_write_requests(pa_table) + + # Create writing stream to the BigQuery table. + stream = create_stream(write_client, bq_table) + + # Send requests. + futures = [] + for request in requests: + future = stream.send(request) + futures.append(future) + future.result() # Optional, will block until writing is complete. + + # Verify results. 
+ verify_result(bq_client, bq_table, futures) From c4e3e3a50aac6531e620102159c7979ae69cdb80 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 11:18:51 -0700 Subject: [PATCH 320/338] chore(main): release 2.32.0 (#942) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index c9e425d474f..107928d6bb9 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index e92a81e4da6..fce8677537d 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { From 8d239345ba3be3edb46707c2ed31ea0d698e594f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 30 May 2025 23:55:47 +0200 Subject: [PATCH 321/338] chore(deps): update all dependencies (#947) * chore(deps): update all dependencies * Update docs.yml * Update lint.yml --------- Co-authored-by: Lingqing Gan --- bigquery_storage/quickstart/requirements.txt | 2 +- bigquery_storage/snippets/requirements.txt | 4 ++-- bigquery_storage/to_dataframe/requirements.txt | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt index d36772241dd..7ddaaef724a 100644 --- a/bigquery_storage/quickstart/requirements.txt +++ b/bigquery_storage/quickstart/requirements.txt @@ -1,2 +1,2 @@ fastavro -google-cloud-bigquery-storage==2.31.0 +google-cloud-bigquery-storage==2.32.0 diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 30eaa4f434b..c34026ed533 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-bigquery-storage==2.31.0 +google-cloud-bigquery-storage==2.32.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.33.0; python_version >= '3.9' +google-cloud-bigquery==3.34.0; python_version >= '3.9' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index cc4d98c87dc..d12e3e9c48b 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,7 +1,7 @@ google-auth==2.40.2 -google-cloud-bigquery-storage==2.31.0 +google-cloud-bigquery-storage==2.32.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' 
-google-cloud-bigquery==3.33.0; python_version >= '3.9' +google-cloud-bigquery==3.34.0; python_version >= '3.9' pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' pyarrow==20.0.0; python_version >= '3.9' From 5ed1b37d8f5f9f5727368e0d16c616985fada5cb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 10:32:18 -0700 Subject: [PATCH 322/338] feat: Add support for CMEK, runtime controls, and PSC-I to Reasoning Engine protos (#968) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for CMEK, runtime controls, and PSC-I to Reasoning Engine protos docs: Add psc_interface_config, min/max_instances, resource_limits, container_concurrency to ReasoningEngineSpec docs: Add encryption_spec to ReasoningEngine PiperOrigin-RevId: 792745180 Source-Link: https://github.com/googleapis/googleapis/commit/85f7aecabe1da39d3377ab87135ea21c9169a6ea Source-Link: https://github.com/googleapis/googleapis-gen/commit/d68782f69bd428937f9bdc38cfc327e1457a85bf Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDY4NzgyZjY5YmQ0Mjg5MzdmOWJkYzM4Y2ZjMzI3ZTE0NTdhODViZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- .../snippet_metadata_google.cloud.bigquery.storage.v1.json | 2 +- .../snippet_metadata_google.cloud.bigquery.storage.v1beta2.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json index 107928d6bb9..c9e425d474f 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json index fce8677537d..e92a81e4da6 100644 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-storage", - "version": "2.32.0" + "version": "0.1.0" }, "snippets": [ { From a7a966623955c5ba3cbcb9e8feb90873796a320b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 18 Aug 2025 20:03:52 +0200 Subject: [PATCH 323/338] chore(deps): update all dependencies (#967) * chore(deps): update all dependencies * Update requirements-test.txt * Update requirements-test.txt * Update requirements-test.txt * Update requirements.txt * Update requirements-test.txt * Update requirements.txt --------- Co-authored-by: Lingqing Gan --- bigquery_storage/pyarrow/requirements-test.txt | 3 ++- bigquery_storage/quickstart/requirements-test.txt | 3 ++- bigquery_storage/snippets/requirements-test.txt | 3 ++- bigquery_storage/snippets/requirements.txt | 3 +++ bigquery_storage/to_dataframe/requirements-test.txt | 3 ++- bigquery_storage/to_dataframe/requirements.txt | 10 
+++++----- 6 files changed, 16 insertions(+), 9 deletions(-) diff --git a/bigquery_storage/pyarrow/requirements-test.txt b/bigquery_storage/pyarrow/requirements-test.txt index 0404a8ad211..7561ed55ce2 100644 --- a/bigquery_storage/pyarrow/requirements-test.txt +++ b/bigquery_storage/pyarrow/requirements-test.txt @@ -1,2 +1,3 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.5; python_version >= '3.8' +pytest===8.3.5; python_version == '3.8' +pytest==8.4.1; python_version >= '3.9' diff --git a/bigquery_storage/quickstart/requirements-test.txt b/bigquery_storage/quickstart/requirements-test.txt index 0404a8ad211..7561ed55ce2 100644 --- a/bigquery_storage/quickstart/requirements-test.txt +++ b/bigquery_storage/quickstart/requirements-test.txt @@ -1,2 +1,3 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.5; python_version >= '3.8' +pytest===8.3.5; python_version == '3.8' +pytest==8.4.1; python_version >= '3.9' diff --git a/bigquery_storage/snippets/requirements-test.txt b/bigquery_storage/snippets/requirements-test.txt index a6c65ca6567..230ca56dc3a 100644 --- a/bigquery_storage/snippets/requirements-test.txt +++ b/bigquery_storage/snippets/requirements-test.txt @@ -1,3 +1,4 @@ google-cloud-testutils==1.6.4 pytest===7.4.3; python_version == '3.7' -pytest==8.3.5; python_version >= '3.8' +pytest===8.3.5; python_version == '3.8' +pytest==8.4.1; python_version >= '3.9' diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index c34026ed533..0ae6c4dfa76 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,3 +1,6 @@ google-cloud-bigquery-storage==2.32.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' google-cloud-bigquery==3.34.0; python_version >= '3.9' +pytest===7.4.3; python_version == '3.7' +pytest===8.3.5; python_version == '3.8' +pytest==8.4.1; python_version >= '3.9' diff --git a/bigquery_storage/to_dataframe/requirements-test.txt b/bigquery_storage/to_dataframe/requirements-test.txt index 0404a8ad211..7561ed55ce2 100644 --- a/bigquery_storage/to_dataframe/requirements-test.txt +++ b/bigquery_storage/to_dataframe/requirements-test.txt @@ -1,2 +1,3 @@ pytest===7.4.3; python_version == '3.7' -pytest==8.3.5; python_version >= '3.8' +pytest===8.3.5; python_version == '3.8' +pytest==8.4.1; python_version >= '3.9' diff --git a/bigquery_storage/to_dataframe/requirements.txt b/bigquery_storage/to_dataframe/requirements.txt index d12e3e9c48b..e3b75fdaf5f 100644 --- a/bigquery_storage/to_dataframe/requirements.txt +++ b/bigquery_storage/to_dataframe/requirements.txt @@ -1,19 +1,19 @@ -google-auth==2.40.2 +google-auth==2.40.3 google-cloud-bigquery-storage==2.32.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.34.0; python_version >= '3.9' +google-cloud-bigquery==3.35.1; python_version >= '3.9' pyarrow===12.0.1; python_version == '3.7' pyarrow===17.0.0; python_version == '3.8' -pyarrow==20.0.0; python_version >= '3.9' +pyarrow==21.0.0; python_version >= '3.9' ipython===7.31.1; python_version == '3.7' ipython===8.10.0; python_version == '3.8' ipython===8.18.1; python_version == '3.9' ipython===8.33.0; python_version == '3.10' -ipython==9.2.0; python_version >= '3.11' +ipython==9.4.0; python_version >= '3.11' ipywidgets==8.1.7 pandas===1.3.5; python_version == '3.7' pandas===2.0.3; python_version == '3.8' -pandas==2.2.3; python_version >= '3.9' +pandas==2.3.1; python_version >= '3.9' tqdm==4.67.1 db-dtypes===1.4.2; python_version <= '3.8' 
db-dtypes==1.4.3; python_version >= '3.9' From e6210faf8c12809de84846c263e0de49701e2be1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 18 Aug 2025 23:45:47 +0200 Subject: [PATCH 324/338] chore(deps): update dependency google-cloud-bigquery to v3.35.1 (#971) --- bigquery_storage/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery_storage/snippets/requirements.txt b/bigquery_storage/snippets/requirements.txt index 0ae6c4dfa76..8a456493526 100644 --- a/bigquery_storage/snippets/requirements.txt +++ b/bigquery_storage/snippets/requirements.txt @@ -1,6 +1,6 @@ google-cloud-bigquery-storage==2.32.0 google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.34.0; python_version >= '3.9' +google-cloud-bigquery==3.35.1; python_version >= '3.9' pytest===7.4.3; python_version == '3.7' pytest===8.3.5; python_version == '3.8' pytest==8.4.1; python_version >= '3.9' From 61393facddb994f2d26ab98df19f4639f7f90238 Mon Sep 17 00:00:00 2001 From: Linchin Date: Thu, 21 Aug 2025 17:34:22 +0000 Subject: [PATCH 325/338] add headers --- bigquery_storage/snippets/customer_record_pb2.py | 14 ++++++++++++++ bigquery_storage/snippets/sample_data_pb2.py | 14 ++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/bigquery_storage/snippets/customer_record_pb2.py b/bigquery_storage/snippets/customer_record_pb2.py index a76fa02d313..457ead954d8 100644 --- a/bigquery_storage/snippets/customer_record_pb2.py +++ b/bigquery_storage/snippets/customer_record_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: customer_record.proto diff --git a/bigquery_storage/snippets/sample_data_pb2.py b/bigquery_storage/snippets/sample_data_pb2.py index 0f261b34343..54ef06d99fa 100644 --- a/bigquery_storage/snippets/sample_data_pb2.py +++ b/bigquery_storage/snippets/sample_data_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sample_data.proto From 8ae82b0d423d0bb73fa104073194c773fdc9c1a4 Mon Sep 17 00:00:00 2001 From: Linchin Date: Thu, 21 Aug 2025 17:35:12 +0000 Subject: [PATCH 326/338] delete generated samples --- ...ig_query_read_create_read_session_async.py | 53 - ...big_query_read_create_read_session_sync.py | 53 - ...enerated_big_query_read_read_rows_async.py | 54 - ...generated_big_query_read_read_rows_sync.py | 54 - ..._big_query_read_split_read_stream_async.py | 53 - ...d_big_query_read_split_read_stream_sync.py | 53 - ...rated_big_query_write_append_rows_async.py | 64 - ...erated_big_query_write_append_rows_sync.py | 64 - ..._write_batch_commit_write_streams_async.py | 54 - ...y_write_batch_commit_write_streams_sync.py | 54 - ...g_query_write_create_write_stream_async.py | 53 - ...ig_query_write_create_write_stream_sync.py | 53 - ...query_write_finalize_write_stream_async.py | 53 - ..._query_write_finalize_write_stream_sync.py | 53 - ...erated_big_query_write_flush_rows_async.py | 53 - ...nerated_big_query_write_flush_rows_sync.py | 53 - ..._big_query_write_get_write_stream_async.py | 53 - ...d_big_query_write_get_write_stream_sync.py | 53 - ...batch_create_metastore_partitions_async.py | 58 - ..._batch_create_metastore_partitions_sync.py | 58 - ...batch_delete_metastore_partitions_async.py | 54 - ..._batch_delete_metastore_partitions_sync.py | 54 - ...batch_update_metastore_partitions_async.py | 57 - ..._batch_update_metastore_partitions_sync.py | 57 - ...service_list_metastore_partitions_async.py | 53 - ..._service_list_metastore_partitions_sync.py | 53 - ...rvice_stream_metastore_partitions_async.py | 64 - ...ervice_stream_metastore_partitions_sync.py | 64 - ...ig_query_read_create_read_session_async.py | 53 - ...big_query_read_create_read_session_sync.py | 53 - ...enerated_big_query_read_read_rows_async.py | 54 - ...generated_big_query_read_read_rows_sync.py | 54 - ..._big_query_read_split_read_stream_async.py | 53 - ...d_big_query_read_split_read_stream_sync.py | 53 - ...rated_big_query_write_append_rows_async.py | 64 - ...erated_big_query_write_append_rows_sync.py | 64 - ..._write_batch_commit_write_streams_async.py | 54 - ...y_write_batch_commit_write_streams_sync.py | 54 - ...g_query_write_create_write_stream_async.py | 53 - ...ig_query_write_create_write_stream_sync.py | 53 - ...query_write_finalize_write_stream_async.py | 53 - ..._query_write_finalize_write_stream_sync.py | 53 - ...erated_big_query_write_flush_rows_async.py | 53 - ...nerated_big_query_write_flush_rows_sync.py | 53 - ..._big_query_write_get_write_stream_async.py | 53 - ...d_big_query_write_get_write_stream_sync.py | 53 - ...batch_create_metastore_partitions_async.py | 58 - ..._batch_create_metastore_partitions_sync.py | 58 - ...batch_delete_metastore_partitions_async.py | 54 - ..._batch_delete_metastore_partitions_sync.py | 54 - ...batch_update_metastore_partitions_async.py | 57 - ..._batch_update_metastore_partitions_sync.py | 57 - ...service_list_metastore_partitions_async.py | 53 - ..._service_list_metastore_partitions_sync.py | 53 - ...rvice_stream_metastore_partitions_async.py | 64 - ...ervice_stream_metastore_partitions_sync.py | 64 - ...data_google.cloud.bigquery.storage.v1.json | 1480 ----------------- ...google.cloud.bigquery.storage.v1alpha.json | 782 --------- ..._google.cloud.bigquery.storage.v1beta.json | 782 --------- ...google.cloud.bigquery.storage.v1beta2.json | 1480 ----------------- 60 files changed, 7628 deletions(-) delete mode 100644 
bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py delete mode 100644 
bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py delete mode 100644 
bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py delete mode 100644 bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py delete mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json delete mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json delete mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json delete mode 100644 bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py deleted file mode 100644 index ea25d382c8f..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateReadSession -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1 - - -async def sample_create_read_session(): - # Create a client - client = bigquery_storage_v1.BigQueryReadAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1.CreateReadSessionRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_read_session(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py deleted file mode 100644 index 13d5a4577a8..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateReadSession -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1 - - -def sample_create_read_session(): - # Create a client - client = bigquery_storage_v1.BigQueryReadClient() - - # Initialize request argument(s) - request = bigquery_storage_v1.CreateReadSessionRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_read_session(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py deleted file mode 100644 index fad19adb7d0..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReadRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_read_rows():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryReadAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.ReadRowsRequest(
-        read_stream="read_stream_value",
-    )
-
-    # Make the request
-    stream = await client.read_rows(request=request)
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryRead_ReadRows_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py
deleted file mode 100644
index 07aedb01554..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_read_rows_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ReadRows
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_read_rows():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryReadClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.ReadRowsRequest(
-        read_stream="read_stream_value",
-    )
-
-    # Make the request
-    stream = client.read_rows(request=request)
-
-    # Handle the response
-    for response in stream:
-        print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py
deleted file mode 100644
index a814616bf90..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SplitReadStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_split_read_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryReadAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.SplitReadStreamRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.split_read_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py
deleted file mode 100644
index 270872795ed..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SplitReadStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_split_read_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryReadClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.SplitReadStreamRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.split_read_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py
deleted file mode 100644
index f8bb1c961cb..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AppendRows
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_append_rows():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.AppendRowsRequest(
-        write_stream="write_stream_value",
-    )
-
-    # This method expects an iterator which contains
-    # 'bigquery_storage_v1.AppendRowsRequest' objects
-    # Here we create a generator that yields a single `request` for
-    # demonstrative purposes.
-    requests = [request]
-
-    def request_generator():
-        for request in requests:
-            yield request
-
-    # Make the request
-    stream = await client.append_rows(requests=request_generator())
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py
deleted file mode 100644
index 2be6201dd70..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_sync.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AppendRows
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_append_rows():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.AppendRowsRequest(
-        write_stream="write_stream_value",
-    )
-
-    # This method expects an iterator which contains
-    # 'bigquery_storage_v1.AppendRowsRequest' objects
-    # Here we create a generator that yields a single `request` for
-    # demonstrative purposes.
-    requests = [request]
-
-    def request_generator():
-        for request in requests:
-            yield request
-
-    # Make the request
-    stream = client.append_rows(requests=request_generator())
-
-    # Handle the response
-    for response in stream:
-        print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py
deleted file mode 100644
index 65ef174ced0..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCommitWriteStreams
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_batch_commit_write_streams():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.BatchCommitWriteStreamsRequest(
-        parent="parent_value",
-        write_streams=["write_streams_value1", "write_streams_value2"],
-    )
-
-    # Make the request
-    response = await client.batch_commit_write_streams(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py
deleted file mode 100644
index d2736556fb1..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCommitWriteStreams
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_batch_commit_write_streams():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.BatchCommitWriteStreamsRequest(
-        parent="parent_value",
-        write_streams=["write_streams_value1", "write_streams_value2"],
-    )
-
-    # Make the request
-    response = client.batch_commit_write_streams(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py
deleted file mode 100644
index 3c4bdadeb80..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateWriteStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_create_write_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.CreateWriteStreamRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = await client.create_write_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py
deleted file mode 100644
index b80374d91c9..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateWriteStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_create_write_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.CreateWriteStreamRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = client.create_write_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py
deleted file mode 100644
index 18c59d61e01..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for FinalizeWriteStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_finalize_write_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.FinalizeWriteStreamRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.finalize_write_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py
deleted file mode 100644
index 62a5dfed2f1..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for FinalizeWriteStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_finalize_write_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.FinalizeWriteStreamRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.finalize_write_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py
deleted file mode 100644
index 4acee8ae560..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for FlushRows
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_flush_rows():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.FlushRowsRequest(
-        write_stream="write_stream_value",
-    )
-
-    # Make the request
-    response = await client.flush_rows(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py
deleted file mode 100644
index 65dd50cd648..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for FlushRows
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_flush_rows():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.FlushRowsRequest(
-        write_stream="write_stream_value",
-    )
-
-    # Make the request
-    response = client.flush_rows(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py
deleted file mode 100644
index 346f83769bb..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetWriteStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-async def sample_get_write_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.GetWriteStreamRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_write_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py
deleted file mode 100644
index ebf20ac0bbe..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetWriteStream
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1
-
-
-def sample_get_write_stream():
-    # Create a client
-    client = bigquery_storage_v1.BigQueryWriteClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1.GetWriteStreamRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_write_stream(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py
deleted file mode 100644
index e7b1b056f71..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCreateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-async def sample_batch_create_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1alpha.CreateMetastorePartitionRequest()
-    requests.parent = "parent_value"
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1alpha.BatchCreateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = await client.batch_create_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py
deleted file mode 100644
index 616dd180bd1..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCreateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-def sample_batch_create_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1alpha.CreateMetastorePartitionRequest()
-    requests.parent = "parent_value"
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1alpha.BatchCreateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = client.batch_create_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py
deleted file mode 100644
index cc589bc9e53..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchDeleteMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-async def sample_batch_delete_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    partition_values = bigquery_storage_v1alpha.MetastorePartitionValues()
-    partition_values.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1alpha.BatchDeleteMetastorePartitionsRequest(
-        parent="parent_value",
-        partition_values=partition_values,
-    )
-
-    # Make the request
-    await client.batch_delete_metastore_partitions(request=request)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py
deleted file mode 100644
index dc3706ef44b..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchDeleteMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-def sample_batch_delete_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    partition_values = bigquery_storage_v1alpha.MetastorePartitionValues()
-    partition_values.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1alpha.BatchDeleteMetastorePartitionsRequest(
-        parent="parent_value",
-        partition_values=partition_values,
-    )
-
-    # Make the request
-    client.batch_delete_metastore_partitions(request=request)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py
deleted file mode 100644
index 7a93f362492..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchUpdateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-async def sample_batch_update_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1alpha.UpdateMetastorePartitionRequest()
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1alpha.BatchUpdateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = await client.batch_update_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py
deleted file mode 100644
index cedd86550e3..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchUpdateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-def sample_batch_update_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1alpha.UpdateMetastorePartitionRequest()
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1alpha.BatchUpdateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = client.batch_update_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py
deleted file mode 100644
index 5c52317101c..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1alpha
-
-
-async def sample_list_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1alpha.ListMetastorePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = await client.list_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py
deleted file mode 100644
index 37389a34069..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1alpha - - -def sample_list_metastore_partitions(): - # Create a client - client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() - - # Initialize request argument(s) - request = bigquery_storage_v1alpha.ListMetastorePartitionsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_metastore_partitions(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py deleted file mode 100644 index f05ef144611..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamMetastorePartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1alpha - - -async def sample_stream_metastore_partitions(): - # Create a client - client = bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1alpha.StreamMetastorePartitionsRequest( - parent="parent_value", - ) - - # This method expects an iterator which contains - # 'bigquery_storage_v1alpha.StreamMetastorePartitionsRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
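The generator pattern above is not limited to a single request: stream_metastore_partitions accepts any iterator of StreamMetastorePartitionsRequest messages, so a real caller would typically yield one request per batch of partitions. A minimal sketch of that shape, reusing only the placeholder parent value from the sample (the make_requests helper and the three-batch loop are illustrative assumptions, not part of the generated sample):

from google.cloud import bigquery_storage_v1alpha


def make_requests(parent, batches=3):
    # Hypothetical helper: yield one request per batch. A real request
    # would also carry the partition payload for that batch.
    for _ in range(batches):
        yield bigquery_storage_v1alpha.StreamMetastorePartitionsRequest(
            parent=parent,
        )


client = bigquery_storage_v1alpha.MetastorePartitionServiceClient()
stream = client.stream_metastore_partitions(requests=make_requests("parent_value"))
for response in stream:
    print(response)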
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.stream_metastore_partitions(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - -# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py deleted file mode 100644 index 2abac02fed6..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamMetastorePartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1alpha - - -def sample_stream_metastore_partitions(): - # Create a client - client = bigquery_storage_v1alpha.MetastorePartitionServiceClient() - - # Initialize request argument(s) - request = bigquery_storage_v1alpha.StreamMetastorePartitionsRequest( - parent="parent_value", - ) - - # This method expects an iterator which contains - # 'bigquery_storage_v1alpha.StreamMetastorePartitionsRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.stream_metastore_partitions(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - -# [END bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py deleted file mode 100644 index 257faccba25..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateReadSession -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_create_read_session(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateReadSessionRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_read_session(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py deleted file mode 100644 index 0fc018f2b7c..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateReadSession -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_create_read_session(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryReadClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateReadSessionRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_read_session(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py deleted file mode 100644 index f82a610000c..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReadRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_read_rows(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.ReadRowsRequest( - read_stream="read_stream_value", - ) - - # Make the request - stream = await client.read_rows(request=request) - - # Handle the response - async for response in stream: - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py deleted file mode 100644 index e106f2c7b98..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReadRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_read_rows(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryReadClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.ReadRowsRequest( - read_stream="read_stream_value", - ) - - # Make the request - stream = client.read_rows(request=request) - - # Handle the response - for response in stream: - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py deleted file mode 100644 index c84b1e7b4e2..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SplitReadStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_split_read_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryReadAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.SplitReadStreamRequest( - name="name_value", - ) - - # Make the request - response = await client.split_read_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py deleted file mode 100644 index 6a3445a5893..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SplitReadStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_split_read_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryReadClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.SplitReadStreamRequest( - name="name_value", - ) - - # Make the request - response = client.split_read_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py deleted file mode 100644 index e47fbc36a8e..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AppendRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_append_rows(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.AppendRowsRequest( - write_stream="write_stream_value", - ) - - # This method expects an iterator which contains - # 'bigquery_storage_v1beta2.AppendRowsRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
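AppendRows is a bidirectional stream: the client can keep yielding AppendRowsRequest messages over the same connection, and the server answers each one. A sketch of a generator that yields several requests (the three-request loop and the bare write_stream placeholder are assumptions for illustration; a real request would also carry serialized row data):

import asyncio

from google.cloud import bigquery_storage_v1beta2


async def append_many():
    client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient()

    def many_requests():
        # Illustrative only: three placeholder requests. Real requests
        # would also populate the rows payload.
        for _ in range(3):
            yield bigquery_storage_v1beta2.AppendRowsRequest(
                write_stream="write_stream_value",
            )

    stream = await client.append_rows(requests=many_requests())
    async for response in stream:
        print(response)


asyncio.run(append_many())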
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.append_rows(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py deleted file mode 100644 index 29e5204bc18..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AppendRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_append_rows(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.AppendRowsRequest( - write_stream="write_stream_value", - ) - - # This method expects an iterator which contains - # 'bigquery_storage_v1beta2.AppendRowsRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
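In the synchronous form the returned stream is a plain iterator, and the server sends one AppendRowsResponse per request, in order, so the number of responses mirrors the number of requests yielded. A sketch under the same placeholder assumptions as the sample (the two_requests helper is hypothetical):

from google.cloud import bigquery_storage_v1beta2

client = bigquery_storage_v1beta2.BigQueryWriteClient()


def two_requests():
    # Illustrative: two placeholder requests; expect two responses back.
    for _ in range(2):
        yield bigquery_storage_v1beta2.AppendRowsRequest(
            write_stream="write_stream_value",
        )


for i, response in enumerate(client.append_rows(requests=two_requests())):
    print("response", i, response)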
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.append_rows(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py deleted file mode 100644 index c1fb793265e..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCommitWriteStreams -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_batch_commit_write_streams(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( - parent="parent_value", - write_streams=["write_streams_value1", "write_streams_value2"], - ) - - # Make the request - response = await client.batch_commit_write_streams(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py deleted file mode 100644 index 5ca9e6ec389..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCommitWriteStreams -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_batch_commit_write_streams(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest( - parent="parent_value", - write_streams=["write_streams_value1", "write_streams_value2"], - ) - - # Make the request - response = client.batch_commit_write_streams(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py deleted file mode 100644 index bffe548d26d..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWriteStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. 
-# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_create_write_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateWriteStreamRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_write_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py deleted file mode 100644 index 416dafdbdeb..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWriteStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_create_write_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.CreateWriteStreamRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_write_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py deleted file mode 100644 index 1b0ebccb4c1..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinalizeWriteStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_finalize_write_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest( - name="name_value", - ) - - # Make the request - response = await client.finalize_write_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py deleted file mode 100644 index e40bf3ba6a3..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FinalizeWriteStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_finalize_write_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest( - name="name_value", - ) - - # Make the request - response = client.finalize_write_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py deleted file mode 100644 index ec0ca244098..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FlushRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_flush_rows(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.FlushRowsRequest( - write_stream="write_stream_value", - ) - - # Make the request - response = await client.flush_rows(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py deleted file mode 100644 index d0752d6a4cf..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FlushRows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_flush_rows(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.FlushRowsRequest( - write_stream="write_stream_value", - ) - - # Make the request - response = client.flush_rows(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py deleted file mode 100644 index 75074c2db8b..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWriteStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -async def sample_get_write_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteAsyncClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.GetWriteStreamRequest( - name="name_value", - ) - - # Make the request - response = await client.get_write_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py deleted file mode 100644 index 62e0f48313a..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWriteStream -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_storage_v1beta2 - - -def sample_get_write_stream(): - # Create a client - client = bigquery_storage_v1beta2.BigQueryWriteClient() - - # Initialize request argument(s) - request = bigquery_storage_v1beta2.GetWriteStreamRequest( - name="name_value", - ) - - # Make the request - response = client.get_write_stream(request=request) - - # Handle the response - print(response) - - -# [END bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync] diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py deleted file mode 100644 index b9dfc858246..00000000000 --- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateMetastorePartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-storage - - -# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-async def sample_batch_create_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1beta.CreateMetastorePartitionRequest()
-    requests.parent = "parent_value"
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1beta.BatchCreateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = await client.batch_create_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py
deleted file mode 100644
index 82e44c12280..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCreateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-def sample_batch_create_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1beta.CreateMetastorePartitionRequest()
-    requests.parent = "parent_value"
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1beta.BatchCreateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = client.batch_create_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py
deleted file mode 100644
index 08b3739d076..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchDeleteMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-async def sample_batch_delete_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    partition_values = bigquery_storage_v1beta.MetastorePartitionValues()
-    partition_values.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1beta.BatchDeleteMetastorePartitionsRequest(
-        parent="parent_value",
-        partition_values=partition_values,
-    )
-
-    # Make the request
-    await client.batch_delete_metastore_partitions(request=request)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py
deleted file mode 100644
index 97127a18ebe..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchDeleteMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-def sample_batch_delete_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    partition_values = bigquery_storage_v1beta.MetastorePartitionValues()
-    partition_values.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1beta.BatchDeleteMetastorePartitionsRequest(
-        parent="parent_value",
-        partition_values=partition_values,
-    )
-
-    # Make the request
-    client.batch_delete_metastore_partitions(request=request)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py
deleted file mode 100644
index f391a15193e..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchUpdateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-async def sample_batch_update_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1beta.UpdateMetastorePartitionRequest()
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1beta.BatchUpdateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = await client.batch_update_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py
deleted file mode 100644
index b707fece719..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchUpdateMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-def sample_batch_update_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    requests = bigquery_storage_v1beta.UpdateMetastorePartitionRequest()
-    requests.metastore_partition.values = ["values_value1", "values_value2"]
-
-    request = bigquery_storage_v1beta.BatchUpdateMetastorePartitionsRequest(
-        parent="parent_value",
-        requests=requests,
-    )
-
-    # Make the request
-    response = client.batch_update_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py
deleted file mode 100644
index fc921d19096..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-async def sample_list_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1beta.ListMetastorePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = await client.list_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py
deleted file mode 100644
index 5091e3effe7..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-def sample_list_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1beta.ListMetastorePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = client.list_metastore_partitions(request=request)
-
-    # Handle the response
-    print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_sync]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py
deleted file mode 100644
index ee2a6c5dfe2..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for StreamMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-async def sample_stream_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1beta.StreamMetastorePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # This method expects an iterator which contains
-    # 'bigquery_storage_v1beta.StreamMetastorePartitionsRequest' objects
-    # Here we create a generator that yields a single `request` for
-    # demonstrative purposes.
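-    # In real code the generator could instead yield additional requests
-    # as more partitions become available; a single request is used here
-    # purely for illustration.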
-    requests = [request]
-
-    def request_generator():
-        for request in requests:
-            yield request
-
-    # Make the request
-    stream = await client.stream_metastore_partitions(requests=request_generator())
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-
-# [END bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async]
diff --git a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py b/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py
deleted file mode 100644
index b2a85f4e0ed..00000000000
--- a/bigquery_storage/generated_samples/bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for StreamMetastorePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bigquery-storage
-
-
-# [START bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_storage_v1beta
-
-
-def sample_stream_metastore_partitions():
-    # Create a client
-    client = bigquery_storage_v1beta.MetastorePartitionServiceClient()
-
-    # Initialize request argument(s)
-    request = bigquery_storage_v1beta.StreamMetastorePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # This method expects an iterator which contains
-    # 'bigquery_storage_v1beta.StreamMetastorePartitionsRequest' objects
-    # Here we create a generator that yields a single `request` for
-    # demonstrative purposes.
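-    # As in the async sample above, the generator may yield any number of
-    # requests; one request is enough to demonstrate the call.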
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.stream_metastore_partitions(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - -# [END bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_sync] diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json deleted file mode 100644 index c9e425d474f..00000000000 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1.json +++ /dev/null @@ -1,1480 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-storage", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest" - }, - { - 
"name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_CreateReadSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_create_read_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_read_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.bigquery_storage_v1.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_ReadRows_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_read_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadAsyncClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryReadClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryRead_SplitReadStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_read_split_read_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_append_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - 
}, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_AppendRows_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_append_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest" - }, - { - "name": 
"parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_BatchCommitWriteStreams_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_batch_commit_write_streams_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": 
"CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_CreateWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_create_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.finalize_write_stream", - "method": { - "fullName": 
"google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FinalizeWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_finalize_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.flush_rows", - "method": { - "fullName": 
"google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_FlushRows_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_flush_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteAsyncClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1.BigQueryWriteClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream", - 
"service": { - "fullName": "google.cloud.bigquery.storage.v1.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1_generated_BigQueryWrite_GetWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1_generated_big_query_write_get_write_stream_sync.py" - } - ] -} diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json deleted file mode 100644 index d55a493695e..00000000000 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1alpha.json +++ /dev/null @@ -1,782 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1alpha", - "version": "v1alpha" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-storage", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.batch_create_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchCreateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsResponse", - "shortName": "batch_create_metastore_partitions" - }, - "description": "Sample for BatchCreateMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async", - 
"segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.batch_create_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchCreateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchCreateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchCreateMetastorePartitionsResponse", - "shortName": "batch_create_metastore_partitions" - }, - "description": "Sample for BatchCreateMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.batch_delete_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchDeleteMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.BatchDeleteMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "shortName": "batch_delete_metastore_partitions" - }, - "description": "Sample for BatchDeleteMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.batch_delete_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchDeleteMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchDeleteMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.BatchDeleteMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "batch_delete_metastore_partitions" - }, - "description": "Sample for BatchDeleteMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.batch_update_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchUpdateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsResponse", - "shortName": "batch_update_metastore_partitions" - }, - "description": "Sample for BatchUpdateMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.batch_update_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.BatchUpdateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchUpdateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1alpha.types.BatchUpdateMetastorePartitionsResponse", - "shortName": "batch_update_metastore_partitions" - }, - "description": "Sample for BatchUpdateMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", - "shortName": 
"MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.list_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "ListMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsResponse", - "shortName": "list_metastore_partitions" - }, - "description": "Sample for ListMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.list_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.ListMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "ListMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1alpha.types.ListMetastorePartitionsResponse", - "shortName": "list_metastore_partitions" - }, - "description": "Sample for ListMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_ListMetastorePartitions_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - 
"type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_list_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceAsyncClient.stream_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "StreamMetastorePartitions" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsResponse]", - "shortName": "stream_metastore_partitions" - }, - "description": "Sample for StreamMetastorePartitions", - "file": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1alpha.MetastorePartitionServiceClient.stream_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService.StreamMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1alpha.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "StreamMetastorePartitions" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1alpha.types.StreamMetastorePartitionsResponse]", - "shortName": "stream_metastore_partitions" - }, - "description": "Sample for StreamMetastorePartitions", - "file": 
"bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1alpha_generated_MetastorePartitionService_StreamMetastorePartitions_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1alpha_generated_metastore_partition_service_stream_metastore_partitions_sync.py" - } - ] -} diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json deleted file mode 100644 index 7cad599102c..00000000000 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta.json +++ /dev/null @@ -1,782 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1beta", - "version": "v1beta" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-storage", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.batch_create_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchCreateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsResponse", - "shortName": "batch_create_metastore_partitions" - }, - "description": "Sample for BatchCreateMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.batch_create_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchCreateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchCreateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchCreateMetastorePartitionsResponse", - "shortName": "batch_create_metastore_partitions" - }, - "description": "Sample for BatchCreateMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchCreateMetastorePartitions_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_create_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.batch_delete_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchDeleteMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.BatchDeleteMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "batch_delete_metastore_partitions" - }, - "description": "Sample for BatchDeleteMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, 
- { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.batch_delete_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchDeleteMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchDeleteMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.BatchDeleteMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "batch_delete_metastore_partitions" - }, - "description": "Sample for BatchDeleteMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchDeleteMetastorePartitions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_delete_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.batch_update_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchUpdateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsResponse", - "shortName": "batch_update_metastore_partitions" - }, - "description": "Sample for BatchUpdateMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.batch_update_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.BatchUpdateMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "BatchUpdateMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta.types.BatchUpdateMetastorePartitionsResponse", - "shortName": "batch_update_metastore_partitions" - }, - "description": "Sample for BatchUpdateMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_BatchUpdateMetastorePartitions_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_batch_update_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.list_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "ListMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - 
"type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsResponse", - "shortName": "list_metastore_partitions" - }, - "description": "Sample for ListMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.list_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.ListMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "ListMetastorePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta.types.ListMetastorePartitionsResponse", - "shortName": "list_metastore_partitions" - }, - "description": "Sample for ListMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_ListMetastorePartitions_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_list_metastore_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient", - "shortName": "MetastorePartitionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceAsyncClient.stream_metastore_partitions", - "method": { - "fullName": 
"google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "StreamMetastorePartitions" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsResponse]", - "shortName": "stream_metastore_partitions" - }, - "description": "Sample for StreamMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient", - "shortName": "MetastorePartitionServiceClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta.MetastorePartitionServiceClient.stream_metastore_partitions", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService.StreamMetastorePartitions", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta.MetastorePartitionService", - "shortName": "MetastorePartitionService" - }, - "shortName": "StreamMetastorePartitions" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta.types.StreamMetastorePartitionsResponse]", - "shortName": "stream_metastore_partitions" - }, - "description": "Sample for StreamMetastorePartitions", - "file": "bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta_generated_MetastorePartitionService_StreamMetastorePartitions_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigquerystorage_v1beta_generated_metastore_partition_service_stream_metastore_partitions_sync.py" - } - ] -} diff --git a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json b/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json deleted file mode 100644 index e92a81e4da6..00000000000 --- a/bigquery_storage/generated_samples/snippet_metadata_google.cloud.bigquery.storage.v1beta2.json +++ /dev/null @@ -1,1480 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.storage.v1beta2", - "version": "v1beta2" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-storage", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" - }, - { - "name": "max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.create_read_session", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.CreateReadSession", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "CreateReadSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "read_session", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadSession" - }, - { - "name": 
"max_stream_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.ReadSession", - "shortName": "create_read_session" - }, - "description": "Sample for CreateReadSession", - "file": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_CreateReadSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_create_read_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.read_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.ReadRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "ReadRows" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest" - }, - { - "name": "read_stream", - "type": "str" - }, - { - "name": "offset", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.ReadRowsResponse]", - "shortName": "read_rows" - }, - "description": "Sample for ReadRows", - "file": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_ReadRows_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_read_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient", - "shortName": "BigQueryReadAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadAsyncClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient", - "shortName": "BigQueryReadClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryReadClient.split_read_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead.SplitReadStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryRead", - "shortName": "BigQueryRead" - }, - "shortName": "SplitReadStream" - }, - "parameters": [ 
- { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamResponse", - "shortName": "split_read_stream" - }, - "description": "Sample for SplitReadStream", - "file": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryRead_SplitReadStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_read_split_read_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.append_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.AppendRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "AppendRows" - }, - "parameters": [ - { - "name": "requests", - "type": 
"Iterator[google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse]", - "shortName": "append_rows" - }, - "description": "Sample for AppendRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_AppendRows_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 55, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 56, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_append_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.batch_commit_write_streams", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.BatchCommitWriteStreams", - "service": { - "fullName": 
"google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "BatchCommitWriteStreams" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsResponse", - "shortName": "batch_commit_write_streams" - }, - "description": "Sample for BatchCommitWriteStreams", - "file": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_BatchCommitWriteStreams_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_batch_commit_write_streams_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.create_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.CreateWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "CreateWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "write_stream", - "type": "google.cloud.bigquery_storage_v1beta2.types.WriteStream" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "create_write_stream" - }, - "description": "Sample for CreateWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_CreateWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_create_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.finalize_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FinalizeWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FinalizeWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamResponse", - "shortName": "finalize_write_stream" - }, - "description": "Sample for FinalizeWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FinalizeWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_finalize_write_stream_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.flush_rows", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.FlushRows", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "FlushRows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest" - }, - { - "name": "write_stream", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.FlushRowsResponse", - "shortName": "flush_rows" - }, - "description": "Sample for FlushRows", - "file": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_FlushRows_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_flush_rows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient", - "shortName": "BigQueryWriteAsyncClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteAsyncClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - 
"type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient", - "shortName": "BigQueryWriteClient" - }, - "fullName": "google.cloud.bigquery_storage_v1beta2.BigQueryWriteClient.get_write_stream", - "method": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite.GetWriteStream", - "service": { - "fullName": "google.cloud.bigquery.storage.v1beta2.BigQueryWrite", - "shortName": "BigQueryWrite" - }, - "shortName": "GetWriteStream" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_storage_v1beta2.types.WriteStream", - "shortName": "get_write_stream" - }, - "description": "Sample for GetWriteStream", - "file": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerystorage_v1beta2_generated_BigQueryWrite_GetWriteStream_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerystorage_v1beta2_generated_big_query_write_get_write_stream_sync.py" - } - ] -} From 385314d830ece2ba07c2793380f4ec9de42c1ab5 Mon Sep 17 00:00:00 2001 From: Linchin Date: Thu, 21 Aug 2025 17:37:21 +0000 Subject: [PATCH 327/338] delete AUTHORING_GUIDE.md and CONTRIBUTING.md --- bigquery_storage/AUTHORING_GUIDE.md | 1 - bigquery_storage/CONTRIBUTING.md | 1 - 2 files changed, 2 deletions(-) delete mode 100644 bigquery_storage/AUTHORING_GUIDE.md delete mode 100644 bigquery_storage/CONTRIBUTING.md diff --git a/bigquery_storage/AUTHORING_GUIDE.md b/bigquery_storage/AUTHORING_GUIDE.md deleted file mode 100644 index 8249522ffc2..00000000000 --- a/bigquery_storage/AUTHORING_GUIDE.md +++ /dev/null @@ -1 +0,0 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/bigquery_storage/CONTRIBUTING.md b/bigquery_storage/CONTRIBUTING.md deleted file mode 100644 index f5fe2e6baf1..00000000000 --- a/bigquery_storage/CONTRIBUTING.md +++ /dev/null @@ -1 +0,0 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file From 34969f50ff5e73602c8f192da7e000ef8e90698a Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 21:05:52 +0000 Subject: [PATCH 328/338] force tests to fail --- bigquery_storage/pyarrow/append_rows_with_arrow_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py index 2b1c8f1da36..b4ddf28ec26 
100644 --- a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py +++ b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py @@ -17,3 +17,4 @@ def test_append_rows_with_arrow(project_id, dataset): append_rows_with_arrow.main(project_id, dataset) + assert 0 From 92a483cbdfe7d56ecfb8e2edb7f582b6f6834b04 Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 21:46:47 +0000 Subject: [PATCH 329/338] remove force fail code --- bigquery_storage/pyarrow/append_rows_with_arrow_test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py index b4ddf28ec26..2b1c8f1da36 100644 --- a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py +++ b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py @@ -17,4 +17,3 @@ def test_append_rows_with_arrow(project_id, dataset): append_rows_with_arrow.main(project_id, dataset) - assert 0 From b3777a7ca1807eb263265230fe244fc4fba92f85 Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 21:49:51 +0000 Subject: [PATCH 330/338] add noxfile_config.py --- bigquery_storage/noxfile_config.py | 42 ++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 bigquery_storage/noxfile_config.py diff --git a/bigquery_storage/noxfile_config.py b/bigquery_storage/noxfile_config.py new file mode 100644 index 00000000000..0ed09bc2250 --- /dev/null +++ b/bigquery_storage/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.7", "3.8"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. 
+ "envs": {}, +} \ No newline at end of file From fea315cbc21c18ef6fa3a35979a0a36587f75e8d Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 22:24:10 +0000 Subject: [PATCH 331/338] add noxfile_config.py to subfolders --- bigquery_storage/pyarrow/noxfile_config.py | 42 +++++++++++++++++++ bigquery_storage/quickstart/noxfile_config.py | 42 +++++++++++++++++++ bigquery_storage/snippets/noxfile_config.py | 42 +++++++++++++++++++ .../to_dataframe/noxfile_config.py | 42 +++++++++++++++++++ 4 files changed, 168 insertions(+) create mode 100644 bigquery_storage/pyarrow/noxfile_config.py create mode 100644 bigquery_storage/quickstart/noxfile_config.py create mode 100644 bigquery_storage/snippets/noxfile_config.py create mode 100644 bigquery_storage/to_dataframe/noxfile_config.py diff --git a/bigquery_storage/pyarrow/noxfile_config.py b/bigquery_storage/pyarrow/noxfile_config.py new file mode 100644 index 00000000000..0ed09bc2250 --- /dev/null +++ b/bigquery_storage/pyarrow/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.7", "3.8"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} \ No newline at end of file diff --git a/bigquery_storage/quickstart/noxfile_config.py b/bigquery_storage/quickstart/noxfile_config.py new file mode 100644 index 00000000000..0ed09bc2250 --- /dev/null +++ b/bigquery_storage/quickstart/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.7", "3.8"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} \ No newline at end of file diff --git a/bigquery_storage/snippets/noxfile_config.py b/bigquery_storage/snippets/noxfile_config.py new file mode 100644 index 00000000000..0ed09bc2250 --- /dev/null +++ b/bigquery_storage/snippets/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.7", "3.8"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. 
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} \ No newline at end of file diff --git a/bigquery_storage/to_dataframe/noxfile_config.py b/bigquery_storage/to_dataframe/noxfile_config.py new file mode 100644 index 00000000000..0ed09bc2250 --- /dev/null +++ b/bigquery_storage/to_dataframe/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.7", "3.8"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} \ No newline at end of file From a4646c4b43e0659cd206d0e739b3a8c129b78d04 Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 22:38:18 +0000 Subject: [PATCH 332/338] add tests for python 3.7 and 3.8 --- bigquery_storage/noxfile_config.py | 2 +- bigquery_storage/pyarrow/noxfile_config.py | 2 +- bigquery_storage/quickstart/noxfile_config.py | 2 +- bigquery_storage/snippets/noxfile_config.py | 2 +- bigquery_storage/to_dataframe/noxfile_config.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/noxfile_config.py b/bigquery_storage/noxfile_config.py index 0ed09bc2250..6a351a66b1a 100644 --- a/bigquery_storage/noxfile_config.py +++ b/bigquery_storage/noxfile_config.py @@ -22,7 +22,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. 
- "ignored_versions": ["2.7", "3.7", "3.8"], + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": True, diff --git a/bigquery_storage/pyarrow/noxfile_config.py b/bigquery_storage/pyarrow/noxfile_config.py index 0ed09bc2250..6a351a66b1a 100644 --- a/bigquery_storage/pyarrow/noxfile_config.py +++ b/bigquery_storage/pyarrow/noxfile_config.py @@ -22,7 +22,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7", "3.7", "3.8"], + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": True, diff --git a/bigquery_storage/quickstart/noxfile_config.py b/bigquery_storage/quickstart/noxfile_config.py index 0ed09bc2250..6a351a66b1a 100644 --- a/bigquery_storage/quickstart/noxfile_config.py +++ b/bigquery_storage/quickstart/noxfile_config.py @@ -22,7 +22,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7", "3.7", "3.8"], + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": True, diff --git a/bigquery_storage/snippets/noxfile_config.py b/bigquery_storage/snippets/noxfile_config.py index 0ed09bc2250..6a351a66b1a 100644 --- a/bigquery_storage/snippets/noxfile_config.py +++ b/bigquery_storage/snippets/noxfile_config.py @@ -22,7 +22,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7", "3.7", "3.8"], + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": True, diff --git a/bigquery_storage/to_dataframe/noxfile_config.py b/bigquery_storage/to_dataframe/noxfile_config.py index 0ed09bc2250..6a351a66b1a 100644 --- a/bigquery_storage/to_dataframe/noxfile_config.py +++ b/bigquery_storage/to_dataframe/noxfile_config.py @@ -22,7 +22,7 @@ TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7", "3.7", "3.8"], + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": True, From d944ca98976344499482b3b9c07b5851a20abf3f Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 22:41:59 +0000 Subject: [PATCH 333/338] delete noxfile.py in each subfolder --- bigquery_storage/pyarrow/noxfile.py | 292 ----------------------- bigquery_storage/quickstart/noxfile.py | 292 ----------------------- bigquery_storage/snippets/noxfile.py | 292 ----------------------- bigquery_storage/to_dataframe/noxfile.py | 292 ----------------------- 4 files changed, 1168 deletions(-) delete mode 100644 bigquery_storage/pyarrow/noxfile.py delete mode 100644 bigquery_storage/quickstart/noxfile.py delete mode 100644 bigquery_storage/snippets/noxfile.py delete mode 100644 bigquery_storage/to_dataframe/noxfile.py diff --git a/bigquery_storage/pyarrow/noxfile.py b/bigquery_storage/pyarrow/noxfile.py deleted file mode 100644 index 494639d2fa5..00000000000 --- a/bigquery_storage/pyarrow/noxfile.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -# Linting with flake8. 
-# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8") - else: - session.install("flake8", "flake8-annotations") - - args = FLAKE8_COMMON_ARGS + [ - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. - p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/bigquery_storage/quickstart/noxfile.py b/bigquery_storage/quickstart/noxfile.py deleted file mode 100644 index 494639d2fa5..00000000000 --- a/bigquery_storage/quickstart/noxfile.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. 
- "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8") - else: - session.install("flake8", "flake8-annotations") - - args = FLAKE8_COMMON_ARGS + [ - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. 
- """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/bigquery_storage/snippets/noxfile.py b/bigquery_storage/snippets/noxfile.py deleted file mode 100644 index 494639d2fa5..00000000000 --- a/bigquery_storage/snippets/noxfile.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. 
- "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8") - else: - session.install("flake8", "flake8-annotations") - - args = FLAKE8_COMMON_ARGS + [ - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/bigquery_storage/to_dataframe/noxfile.py b/bigquery_storage/to_dataframe/noxfile.py deleted file mode 100644 index 494639d2fa5..00000000000 --- a/bigquery_storage/to_dataframe/noxfile.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. 
- "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8") - else: - session.install("flake8", "flake8-annotations") - - args = FLAKE8_COMMON_ARGS + [ - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) From 93ee955b4a10050df3c741c99a387bcfced9dd8b Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 22:42:28 +0000 Subject: [PATCH 334/338] delete noxfile.py in the bqstorage root folder --- bigquery_storage/noxfile_config.py | 42 ------------------------------ 1 file changed, 42 deletions(-) delete mode 100644 bigquery_storage/noxfile_config.py diff --git a/bigquery_storage/noxfile_config.py b/bigquery_storage/noxfile_config.py deleted file mode 100644 index 6a351a66b1a..00000000000 --- a/bigquery_storage/noxfile_config.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Default TEST_CONFIG_OVERRIDE for python repos. - -# You can copy this file into your directory, then it will be imported from -# the noxfile.py. - -# The source of truth: -# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py - -TEST_CONFIG_OVERRIDE = { - # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": True, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. 
- "envs": {}, -} \ No newline at end of file From f8fa57a04bf98594e89abb69aa7bd28dff1717a9 Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 23:29:58 +0000 Subject: [PATCH 335/338] fix some lint errors --- bigquery_storage/conftest.py | 9 +++++---- .../pyarrow/append_rows_with_arrow.py | 17 ++++++++--------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/bigquery_storage/conftest.py b/bigquery_storage/conftest.py index fad90af1a6d..63d53531471 100644 --- a/bigquery_storage/conftest.py +++ b/bigquery_storage/conftest.py @@ -15,19 +15,20 @@ import datetime import os import random +from typing import Generator + +from google.cloud import bigquery import pytest @pytest.fixture(scope="session") -def project_id(): +def project_id() -> str: return os.environ["GOOGLE_CLOUD_PROJECT"] @pytest.fixture(scope="session") -def dataset(project_id): - from google.cloud import bigquery - +def dataset(project_id: str) -> Generator[bigquery.Dataset, None, None]: client = bigquery.Client() # Add a random suffix to dataset name to avoid conflict, because we run diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow.py b/bigquery_storage/pyarrow/append_rows_with_arrow.py index e96077299fd..21af9b88cee 100644 --- a/bigquery_storage/pyarrow/append_rows_with_arrow.py +++ b/bigquery_storage/pyarrow/append_rows_with_arrow.py @@ -16,13 +16,14 @@ import datetime import decimal +from google.cloud import bigquery +from google.cloud import bigquery_storage_v1 from google.cloud.bigquery import enums +from google.cloud.bigquery_storage_v1 import types as gapic_types +from google.cloud.bigquery_storage_v1.writer import AppendRowsStream import pandas as pd import pyarrow as pa -from google.cloud import bigquery -from google.cloud.bigquery_storage_v1 import types as gapic_types -from google.cloud.bigquery_storage_v1.writer import AppendRowsStream TABLE_LENGTH = 100_000 @@ -84,13 +85,11 @@ ) -def bqstorage_write_client(): - from google.cloud import bigquery_storage_v1 - +def bqstorage_write_client() -> bigquery_storage_v1.BigQueryWriteClient: return bigquery_storage_v1.BigQueryWriteClient() -def make_table(project_id, dataset_id, bq_client): +def make_table(project_id: str, dataset_id: str, bq_client: bigquery.Client) -> bigquery.Table: table_id = "append_rows_w_arrow_test" table_id_full = f"{project_id}.{dataset_id}.{table_id}" bq_table = bigquery.Table(table_id_full, schema=BQ_SCHEMA) @@ -99,7 +98,7 @@ def make_table(project_id, dataset_id, bq_client): return created_table -def create_stream(bqstorage_write_client, table): +def create_stream(bqstorage_write_client: bigquery_storage_v1.BigQueryWriteClient, table: bigquery.Table) -> AppendRowsStream: stream_name = f"projects/{table.project}/datasets/{table.dataset_id}/tables/{table.table_id}/_default" request_template = gapic_types.AppendRowsRequest() request_template.write_stream = stream_name @@ -116,7 +115,7 @@ def create_stream(bqstorage_write_client, table): return append_rows_stream -def generate_pyarrow_table(num_rows=TABLE_LENGTH): +def generate_pyarrow_table(num_rows: int = TABLE_LENGTH) -> pa.Table: date_1 = datetime.date(2020, 10, 1) date_2 = datetime.date(2021, 10, 1) From b8f758add05d2dfe93fcbc362d4f1ac3faa7462d Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 23:47:03 +0000 Subject: [PATCH 336/338] lint --- bigquery_storage/pyarrow/noxfile_config.py | 4 ++-- bigquery_storage/quickstart/noxfile_config.py | 2 +- bigquery_storage/snippets/noxfile_config.py | 2 +- bigquery_storage/to_dataframe/noxfile_config.py | 2 +- 4 
files changed, 5 insertions(+), 5 deletions(-) diff --git a/bigquery_storage/pyarrow/noxfile_config.py b/bigquery_storage/pyarrow/noxfile_config.py index 6a351a66b1a..29edb31ffe8 100644 --- a/bigquery_storage/pyarrow/noxfile_config.py +++ b/bigquery_storage/pyarrow/noxfile_config.py @@ -2,7 +2,7 @@ # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# You maye obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -39,4 +39,4 @@ # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, -} \ No newline at end of file +} diff --git a/bigquery_storage/quickstart/noxfile_config.py b/bigquery_storage/quickstart/noxfile_config.py index 6a351a66b1a..f1fa9e5618b 100644 --- a/bigquery_storage/quickstart/noxfile_config.py +++ b/bigquery_storage/quickstart/noxfile_config.py @@ -39,4 +39,4 @@ # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, -} \ No newline at end of file +} diff --git a/bigquery_storage/snippets/noxfile_config.py b/bigquery_storage/snippets/noxfile_config.py index 6a351a66b1a..f1fa9e5618b 100644 --- a/bigquery_storage/snippets/noxfile_config.py +++ b/bigquery_storage/snippets/noxfile_config.py @@ -39,4 +39,4 @@ # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, -} \ No newline at end of file +} diff --git a/bigquery_storage/to_dataframe/noxfile_config.py b/bigquery_storage/to_dataframe/noxfile_config.py index 6a351a66b1a..f1fa9e5618b 100644 --- a/bigquery_storage/to_dataframe/noxfile_config.py +++ b/bigquery_storage/to_dataframe/noxfile_config.py @@ -39,4 +39,4 @@ # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, -} \ No newline at end of file +} From 095c552a33b10e7cf5fca27f7d001ce4cf5905e4 Mon Sep 17 00:00:00 2001 From: Linchin Date: Wed, 27 Aug 2025 23:58:35 +0000 Subject: [PATCH 337/338] fix all lint! --- .../pyarrow/append_rows_with_arrow.py | 12 +++++++++--- .../pyarrow/append_rows_with_arrow_test.py | 4 +++- bigquery_storage/quickstart/quickstart.py | 2 +- .../quickstart/quickstart_test.py | 10 +++++++--- .../snippets/append_rows_pending.py | 7 +++---- .../snippets/append_rows_pending_test.py | 5 ++--- .../snippets/append_rows_proto2.py | 5 ++--- .../snippets/append_rows_proto2_test.py | 5 ++--- bigquery_storage/snippets/conftest.py | 19 ++++++++++++------- .../to_dataframe/read_query_results.py | 4 +++- .../to_dataframe/read_query_results_test.py | 4 +++- .../to_dataframe/read_table_bigquery.py | 5 ++++- .../to_dataframe/read_table_bigquery_test.py | 4 +++- .../to_dataframe/read_table_bqstorage.py | 4 +++- .../to_dataframe/read_table_bqstorage_test.py | 4 +++- 15 files changed, 60 insertions(+), 34 deletions(-) diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow.py b/bigquery_storage/pyarrow/append_rows_with_arrow.py index 21af9b88cee..78cb0a57573 100644 --- a/bigquery_storage/pyarrow/append_rows_with_arrow.py +++ b/bigquery_storage/pyarrow/append_rows_with_arrow.py @@ -13,8 +13,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from concurrent.futures import Future
 import datetime
 import decimal
+from typing import Iterable
 
 from google.cloud import bigquery
 from google.cloud import bigquery_storage_v1
@@ -158,7 +160,9 @@ def generate_pyarrow_table(num_rows: int = TABLE_LENGTH) -> pa.Table:
     return table
 
 
-def generate_write_requests(pyarrow_table):
+def generate_write_requests(
+    pyarrow_table: pa.Table,
+) -> Iterable[gapic_types.AppendRowsRequest]:
     # Determine max_chunksize of the record batches. Because max size of
     # AppendRowsRequest is 10 MB, we need to split the table if it's too big.
     # See: https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1#appendrowsrequest
@@ -173,7 +177,9 @@ def generate_write_requests(pyarrow_table):
     yield request
 
 
-def verify_result(client, table, futures):
+def verify_result(
+    client: bigquery.Client, table: bigquery.Table, futures: "list[Future]"
+) -> None:
     bq_table = client.get_table(table)
 
     # Verify table schema.
@@ -190,7 +196,7 @@ def verify_result(client, table, futures):
     assert len(futures) == 2
 
 
-def main(project_id, dataset):
+def main(project_id: str, dataset: bigquery.Dataset) -> None:
     # Initialize clients.
     write_client = bqstorage_write_client()
     bq_client = bigquery.Client()
diff --git a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py
index 2b1c8f1da36..f31de43b51f 100644
--- a/bigquery_storage/pyarrow/append_rows_with_arrow_test.py
+++ b/bigquery_storage/pyarrow/append_rows_with_arrow_test.py
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from google.cloud import bigquery
+
 from . import append_rows_with_arrow
 
 
-def test_append_rows_with_arrow(project_id, dataset):
+def test_append_rows_with_arrow(project_id: str, dataset: bigquery.Dataset) -> None:
     append_rows_with_arrow.main(project_id, dataset)
diff --git a/bigquery_storage/quickstart/quickstart.py b/bigquery_storage/quickstart/quickstart.py
index 2d065083655..6f120ce9a58 100644
--- a/bigquery_storage/quickstart/quickstart.py
+++ b/bigquery_storage/quickstart/quickstart.py
@@ -15,7 +15,7 @@
 import argparse
 
 
-def main(project_id="your-project-id", snapshot_millis=0):
+def main(project_id: str = "your-project-id", snapshot_millis: int = 0) -> None:
     # [START bigquerystorage_quickstart]
     from google.cloud.bigquery_storage import BigQueryReadClient, types
 
diff --git a/bigquery_storage/quickstart/quickstart_test.py b/bigquery_storage/quickstart/quickstart_test.py
index 8e1e0dfdef5..3380c923847 100644
--- a/bigquery_storage/quickstart/quickstart_test.py
+++ b/bigquery_storage/quickstart/quickstart_test.py
@@ -14,23 +14,27 @@
 
 import datetime
 
+import pytest
+
 from . import quickstart
 
 
-def now_millis():
+def now_millis() -> int:
     return int(
         (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds()
         * 1000
     )
 
 
-def test_quickstart_wo_snapshot(capsys, project_id):
+def test_quickstart_wo_snapshot(capsys: pytest.CaptureFixture, project_id: str) -> None:
     quickstart.main(project_id)
     out, _ = capsys.readouterr()
     assert "unique names in states: WA" in out
 
 
-def test_quickstart_with_snapshot(capsys, project_id):
+def test_quickstart_with_snapshot(
+    capsys: pytest.CaptureFixture, project_id: str
+) -> None:
     quickstart.main(project_id, now_millis() - 5000)
     out, _ = capsys.readouterr()
     assert "unique names in states: WA" in out
diff --git a/bigquery_storage/snippets/append_rows_pending.py b/bigquery_storage/snippets/append_rows_pending.py
index ac47828dcdd..3c34b472cde 100644
--- a/bigquery_storage/snippets/append_rows_pending.py
+++ b/bigquery_storage/snippets/append_rows_pending.py
@@ -18,10 +18,9 @@
 using the low-level generated client for Python.
 """
 
-from google.protobuf import descriptor_pb2
-
 from google.cloud import bigquery_storage_v1
 from google.cloud.bigquery_storage_v1 import types, writer
+from google.protobuf import descriptor_pb2
 
 # If you update the customer_record.proto protocol buffer definition, run:
 #
@@ -31,14 +30,14 @@
 from . import customer_record_pb2
 
 
-def create_row_data(row_num: int, name: str):
+def create_row_data(row_num: int, name: str) -> bytes:
     row = customer_record_pb2.CustomerRecord()
     row.row_num = row_num
     row.customer_name = name
     return row.SerializeToString()
 
 
-def append_rows_pending(project_id: str, dataset_id: str, table_id: str):
+def append_rows_pending(project_id: str, dataset_id: str, table_id: str) -> None:
     """Create a write stream, write some sample data, and commit the stream."""
     write_client = bigquery_storage_v1.BigQueryWriteClient()
     parent = write_client.table_path(project_id, dataset_id, table_id)
diff --git a/bigquery_storage/snippets/append_rows_pending_test.py b/bigquery_storage/snippets/append_rows_pending_test.py
index 9af6957071f..791e9609779 100644
--- a/bigquery_storage/snippets/append_rows_pending_test.py
+++ b/bigquery_storage/snippets/append_rows_pending_test.py
@@ -15,9 +15,8 @@
 import pathlib
 import random
 
-import pytest
-
 from google.cloud import bigquery
+import pytest
 
 from . import append_rows_pending
 
@@ -51,7 +50,7 @@ def test_append_rows_pending(
     capsys: pytest.CaptureFixture,
     bigquery_client: bigquery.Client,
     sample_data_table: str,
-):
+) -> None:
     project_id, dataset_id, table_id = sample_data_table.split(".")
     append_rows_pending.append_rows_pending(
         project_id=project_id, dataset_id=dataset_id, table_id=table_id
diff --git a/bigquery_storage/snippets/append_rows_proto2.py b/bigquery_storage/snippets/append_rows_proto2.py
index 7f3d88a7a3c..d610b31faa2 100644
--- a/bigquery_storage/snippets/append_rows_proto2.py
+++ b/bigquery_storage/snippets/append_rows_proto2.py
@@ -20,10 +20,9 @@
 import datetime
 import decimal
 
-from google.protobuf import descriptor_pb2
-
 from google.cloud import bigquery_storage_v1
 from google.cloud.bigquery_storage_v1 import types, writer
+from google.protobuf import descriptor_pb2
 
 # If you make updates to the sample_data.proto protocol buffers definition,
 # run:
@@ -34,7 +33,7 @@
 from . import sample_data_pb2
 
 
-def append_rows_proto2(project_id: str, dataset_id: str, table_id: str):
+def append_rows_proto2(project_id: str, dataset_id: str, table_id: str) -> None:
     """Create a write stream, write some sample data, and commit the stream."""
     write_client = bigquery_storage_v1.BigQueryWriteClient()
     parent = write_client.table_path(project_id, dataset_id, table_id)
diff --git a/bigquery_storage/snippets/append_rows_proto2_test.py b/bigquery_storage/snippets/append_rows_proto2_test.py
index 904f17ddab9..15e5b9d9105 100644
--- a/bigquery_storage/snippets/append_rows_proto2_test.py
+++ b/bigquery_storage/snippets/append_rows_proto2_test.py
@@ -17,9 +17,8 @@
 import pathlib
 import random
 
-import pytest
-
 from google.cloud import bigquery
+import pytest
 
 from . import append_rows_proto2
 
@@ -53,7 +52,7 @@ def test_append_rows_proto2(
     capsys: pytest.CaptureFixture,
     bigquery_client: bigquery.Client,
     sample_data_table: str,
-):
+) -> None:
     project_id, dataset_id, table_id = sample_data_table.split(".")
     append_rows_proto2.append_rows_proto2(
         project_id=project_id, dataset_id=dataset_id, table_id=table_id
diff --git a/bigquery_storage/snippets/conftest.py b/bigquery_storage/snippets/conftest.py
index a186291ef07..5f1e958183c 100644
--- a/bigquery_storage/snippets/conftest.py
+++ b/bigquery_storage/snippets/conftest.py
@@ -12,16 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pytest
-import test_utils.prefixer
+from typing import Generator
 
 from google.cloud import bigquery
+import pytest
+import test_utils.prefixer
 
 prefixer = test_utils.prefixer.Prefixer("python-bigquery-storage", "samples/snippets")
 
 
 @pytest.fixture(scope="session", autouse=True)
-def cleanup_datasets(bigquery_client: bigquery.Client):
+def cleanup_datasets(bigquery_client: bigquery.Client) -> None:
     for dataset in bigquery_client.list_datasets():
         if prefixer.should_cleanup(dataset.dataset_id):
             bigquery_client.delete_dataset(
@@ -30,17 +31,19 @@ def cleanup_datasets(bigquery_client: bigquery.Client):
 
 
 @pytest.fixture(scope="session")
-def bigquery_client():
+def bigquery_client() -> bigquery.Client:
     return bigquery.Client()
 
 
 @pytest.fixture(scope="session")
-def project_id(bigquery_client):
+def project_id(bigquery_client: bigquery.Client) -> str:
     return bigquery_client.project
 
 
 @pytest.fixture(scope="session")
-def dataset_id(bigquery_client: bigquery.Client, project_id: str):
+def dataset_id(
+    bigquery_client: bigquery.Client, project_id: str
+) -> Generator[str, None, None]:
     dataset_id = prefixer.create_prefix()
     full_dataset_id = f"{project_id}.{dataset_id}"
     dataset = bigquery.Dataset(full_dataset_id)
@@ -50,7 +53,9 @@ def dataset_id(bigquery_client: bigquery.Client, project_id: str):
 
 
 @pytest.fixture(scope="session")
-def dataset_id_non_us(bigquery_client: bigquery.Client, project_id: str):
+def dataset_id_non_us(
+    bigquery_client: bigquery.Client, project_id: str
+) -> Generator[str, None, None]:
     dataset_id = prefixer.create_prefix()
     full_dataset_id = f"{project_id}.{dataset_id}"
     dataset = bigquery.Dataset(full_dataset_id)
diff --git a/bigquery_storage/to_dataframe/read_query_results.py b/bigquery_storage/to_dataframe/read_query_results.py
index 45bae1eac44..e947e8afe93 100644
--- a/bigquery_storage/to_dataframe/read_query_results.py
+++ b/bigquery_storage/to_dataframe/read_query_results.py
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import pandas
 
 
-def read_query_results():
+
+def read_query_results() -> pandas.DataFrame:
     # [START bigquerystorage_pandas_tutorial_read_query_results]
     from google.cloud import bigquery
diff --git a/bigquery_storage/to_dataframe/read_query_results_test.py b/bigquery_storage/to_dataframe/read_query_results_test.py
index 55b55a08235..b5cb5517401 100644
--- a/bigquery_storage/to_dataframe/read_query_results_test.py
+++ b/bigquery_storage/to_dataframe/read_query_results_test.py
@@ -12,10 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+
 from . import read_query_results
 
 
-def test_read_query_results(capsys):
+def test_read_query_results(capsys: pytest.CaptureFixture) -> None:
     read_query_results.read_query_results()
     out, _ = capsys.readouterr()
     assert "stackoverflow" in out
diff --git a/bigquery_storage/to_dataframe/read_table_bigquery.py b/bigquery_storage/to_dataframe/read_table_bigquery.py
index 82d8879b1c8..7a69a64d77d 100644
--- a/bigquery_storage/to_dataframe/read_table_bigquery.py
+++ b/bigquery_storage/to_dataframe/read_table_bigquery.py
@@ -13,7 +13,10 @@
 # limitations under the License.
 
 
-def read_table():
+import pandas
+
+
+def read_table() -> pandas.DataFrame:
     # [START bigquerystorage_pandas_tutorial_read_table]
     from google.cloud import bigquery
 
diff --git a/bigquery_storage/to_dataframe/read_table_bigquery_test.py b/bigquery_storage/to_dataframe/read_table_bigquery_test.py
index c8301857108..5b45c4d5163 100644
--- a/bigquery_storage/to_dataframe/read_table_bigquery_test.py
+++ b/bigquery_storage/to_dataframe/read_table_bigquery_test.py
@@ -12,10 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+
 from . import read_table_bigquery
 
 
-def test_read_table(capsys):
+def test_read_table(capsys: pytest.CaptureFixture) -> None:
     read_table_bigquery.read_table()
     out, _ = capsys.readouterr()
     assert "country_name" in out
diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage.py b/bigquery_storage/to_dataframe/read_table_bqstorage.py
index 31b21618b49..ce1cd3872ae 100644
--- a/bigquery_storage/to_dataframe/read_table_bqstorage.py
+++ b/bigquery_storage/to_dataframe/read_table_bqstorage.py
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pandas as pd
 
-def read_table(your_project_id):
+
+def read_table(your_project_id: str) -> pd.DataFrame:
     original_your_project_id = your_project_id
     # [START bigquerystorage_pandas_tutorial_read_session]
     your_project_id = "project-for-read-session"
diff --git a/bigquery_storage/to_dataframe/read_table_bqstorage_test.py b/bigquery_storage/to_dataframe/read_table_bqstorage_test.py
index cc09307836c..7b46a6b180a 100644
--- a/bigquery_storage/to_dataframe/read_table_bqstorage_test.py
+++ b/bigquery_storage/to_dataframe/read_table_bqstorage_test.py
@@ -12,10 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+
 from . import read_table_bqstorage
 
 
-def test_read_table(capsys, project_id):
+def test_read_table(capsys: pytest.CaptureFixture, project_id: str) -> None:
     read_table_bqstorage.read_table(your_project_id=project_id)
     out, _ = capsys.readouterr()
     assert "species_common_name" in out

From 8e8eeb4a49dddced84b40d50f802dbe55dc00fe5 Mon Sep 17 00:00:00 2001
From: Linchin
Date: Thu, 28 Aug 2025 00:06:10 +0000
Subject: [PATCH 338/338] add dependency to fix test failure

---
 bigquery_storage/quickstart/requirements.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bigquery_storage/quickstart/requirements.txt b/bigquery_storage/quickstart/requirements.txt
index 7ddaaef724a..9d69822935d 100644
--- a/bigquery_storage/quickstart/requirements.txt
+++ b/bigquery_storage/quickstart/requirements.txt
@@ -1,2 +1,3 @@
 fastavro
+google-cloud-bigquery
 google-cloud-bigquery-storage==2.32.0
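
Editorial appendix (not part of the patch series). Two hedged sketches follow to make the end state of the series concrete; every function name in them is ours, not the samples'.

First, the read path that bigquery_storage/quickstart/quickstart.py implements once patch 337's type annotations land. This is a minimal sketch assuming the v2 BigQueryReadClient API that the sample imports; the sample's snapshot_millis handling and argparse wiring are elided, and read_washington_names is an invented name:

    from google.cloud.bigquery_storage import BigQueryReadClient, types

    def read_washington_names(project_id: str) -> None:
        client = BigQueryReadClient()

        # Public baby-names table used throughout these samples.
        table = "projects/bigquery-public-data/datasets/usa_names/tables/usa_1910_current"

        requested_session = types.ReadSession(
            table=table,
            data_format=types.DataFormat.AVRO,  # Arrow is the other supported format.
            read_options=types.ReadSession.TableReadOptions(
                selected_fields=["name", "number", "state"],
                row_restriction='state = "WA"',
            ),
        )

        # The read session is created in the billing project, which may differ
        # from the project that owns the table.
        session = client.create_read_session(
            parent=f"projects/{project_id}",
            read_session=requested_session,
            max_stream_count=1,
        )

        # One stream suffices here; fan out over session.streams to parallelize.
        reader = client.read_rows(session.streams[0].name)
        names = {row["name"] for row in reader.rows(session)}
        print(f"Got {len(names)} unique names")

Second, patch 337 re-types generate_write_requests in pyarrow/append_rows_with_arrow.py, whose comment notes that an AppendRowsRequest may not exceed 10 MB. A hypothetical, self-contained illustration of that batching idea (MAX_REQUEST_BYTES and chunk_table are invented names, and the half-budget heuristic is ours, not the sample's):

    import pyarrow as pa

    MAX_REQUEST_BYTES = 10 * 1024 * 1024  # AppendRowsRequest hard limit.

    def chunk_table(table: pa.Table) -> "list[pa.RecordBatch]":
        # Estimate how many rows fit in half the request budget from the
        # table's average row width, then let pyarrow split the table into
        # record batches of at most that many rows.
        rows_per_batch = max(
            1, int(len(table) * (MAX_REQUEST_BYTES // 2) / max(1, table.nbytes))
        )
        return table.to_batches(max_chunksize=rows_per_batch)

Finally, patch 338's new google-cloud-bigquery requirement for the quickstart likely follows from patch 335, which moved "from google.cloud import bigquery" to module level in the shared bigquery_storage/conftest.py that the quickstart tests also load.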